diff --git a/.github/workflows/test-action.yml b/.github/workflows/test-action.yml
index 0316734..b829e9f 100644
--- a/.github/workflows/test-action.yml
+++ b/.github/workflows/test-action.yml
@@ -3,8 +3,6 @@ name: Test Action
on:
pull_request:
types: [opened, edited, synchronize, reopened, closed]
- paths:
- - 'test/**'
jobs:
get-downstream-assets:
@@ -31,4 +29,3 @@ jobs:
beta: Wide World Importers PE1
test-action: Wide World Importers PE1
IGNORE_MODEL_ALIAS_MATCHING: true
- ATLAN_CONFIG: .atlan/config.yaml
diff --git a/.gitignore b/.gitignore
index b25fc27..ecd4e64 100644
--- a/.gitignore
+++ b/.gitignore
@@ -4,7 +4,4 @@
node_modules/
event.json
.idea
-.DS_Store
-.vscode/
-contracts/
-.atlan/
\ No newline at end of file
+.DS_Store
\ No newline at end of file
diff --git a/README.md b/README.md
index cd7a69a..eba597d 100644
--- a/README.md
+++ b/README.md
@@ -68,7 +68,6 @@ After you've completed the configuration above, create a pull request with a cha
| `ATLAN_API_TOKEN` | Needed for authenticating API requests to the user's tenant. https://ask.atlan.com/hc/en-us/articles/8312649180049 | true |
| `DBT_ENVIRONMENT_BRANCH_MAP` | Map a GitHub branch to a specific dbt environment. If you do this, the Atlan GitHub action will pick lineage for that specific environment from Atlan. You can provide the mapping like `branch name`: `dbt environment name`.
main: DBT-DEMO-PROD
beta: Wide World Importers PE1
test-action: Wide World Importers PE1 | false |
| `IGNORE_MODEL_ALIAS_MATCHING` | By default, the action checks whether an alias is defined for a model in the code and looks up the relevant asset in Atlan using that alias. You can turn off alias-name matching using this variable. | false | false |
-| `ATLAN_CONFIG` | The Atlan CLI configuration file is typically located at `.atlan/config.yaml`. Setting the `ATLAN_CONFIG` environment variable will trigger impact analysis on Atlan Data Contracts, if included in a GitHub pull request. | false | |
## FAQs
diff --git a/action.yml b/action.yml
index e395d45..ebc4883 100644
--- a/action.yml
+++ b/action.yml
@@ -19,9 +19,6 @@ inputs:
description: "Ignore model alias matching"
required: false
default: false
- ATLAN_CONFIG:
- description: "Atlan CLI config file location"
- required: false
runs:
using: "node16"
main: "dist/index.js"
diff --git a/adapters/api/get-asset-classifications.js b/adapters/api/get-asset-classifications.js
deleted file mode 100644
index a8f1cea..0000000
--- a/adapters/api/get-asset-classifications.js
+++ /dev/null
@@ -1,33 +0,0 @@
-import {
- ATLAN_API_TOKEN,
- ATLAN_INSTANCE_URL,
-} from "../utils/get-environment-variables.js";
-
-import fetch from "node-fetch";
-
-export default async function getAssetClassifications() {
- var myHeaders = {
- Authorization: `Bearer ${ATLAN_API_TOKEN}`,
- "Content-Type": "application/json",
- };
-
- var requestOptions = {
- method: "GET",
- headers: myHeaders,
- redirect: "follow",
- };
-
- var response = await fetch(
- `${ATLAN_INSTANCE_URL}/api/meta/types/typedefs?type=classification`,
- requestOptions
- )
- .then((e) => e.json())
- .catch((err) => {
- return {
- error: err
- }
- });
- if (response.error) return response
-
- return response?.classificationDefs;
- }
\ No newline at end of file
diff --git a/adapters/api/get-classifications.js b/adapters/api/get-classifications.js
index 40aab2f..cc954f2 100644
--- a/adapters/api/get-classifications.js
+++ b/adapters/api/get-classifications.js
@@ -1,10 +1,9 @@
+import fetch from "node-fetch";
import {
- ATLAN_API_TOKEN,
ATLAN_INSTANCE_URL,
+ ATLAN_API_TOKEN,
} from "../utils/get-environment-variables.js";
-import fetch from "node-fetch";
-
export default async function getClassifications({
sendSegmentEventOfIntegration,
}) {
@@ -35,4 +34,4 @@ export default async function getClassifications({
});
return response?.classificationDefs;
-}
\ No newline at end of file
+}
diff --git a/adapters/api/get-contract-asset.js b/adapters/api/get-contract-asset.js
deleted file mode 100644
index 88cc3b9..0000000
--- a/adapters/api/get-contract-asset.js
+++ /dev/null
@@ -1,102 +0,0 @@
-import {
- ATLAN_API_TOKEN,
- ATLAN_INSTANCE_URL,
-} from "../utils/get-environment-variables.js";
-
-import fetch from "node-fetch";
-import {
- getErrorAssetNotFound,
-} from "../templates/atlan.js";
-import stringify from "json-stringify-safe";
-
-export default async function getContractAsset({
- dataset,
- assetQualifiedName,
-}) {
- var myHeaders = {
- Authorization: `Bearer ${ATLAN_API_TOKEN}`,
- "Content-Type": "application/json",
- };
-
- var raw = stringify(
- {
- dsl: {
- from: 0,
- size: 1,
- query: {
- bool: {
- must: [
- {
- match: {
- __state: "ACTIVE"
- }
- },
- {
- term: {
- qualifiedName: assetQualifiedName
- }
- },
- {
- terms: {
- "__typeName.keyword": [
- "Table",
- "MaterialisedView",
- "View"
- ]
- }
- }
- ]
- }
- }
- },
- attributes: [
- "guid",
- "name",
- "description",
- "userDescription",
- "sourceURL",
- "qualifiedName",
- "connectorName",
- "certificateStatus",
- "certificateUpdatedBy",
- "certificateUpdatedAt",
- "ownerUsers",
- "ownerGroups",
- "classificationNames",
- "meanings"
- ],
- suppressLogs: true,
- showSearchScore: false,
- excludeClassifications: true,
- includeClassificationNames: true,
- excludeMeanings: false
- }
- );
-
- var requestOptions = {
- method: "POST",
- headers: myHeaders,
- body: raw,
- };
-
- var response = await fetch(
- `${ATLAN_INSTANCE_URL}/api/meta/search/indexsearch`,
- requestOptions
- )
- .then((e) => e.json())
- .catch((err) => {
- return {
- error: err,
- comment: getErrorAssetNotFound(dataset)
- }
- });
-
- if (!response?.entities?.length) {
- return {
- error: "asset not found",
- comment: getErrorAssetNotFound(dataset),
- };
- }
-
- return response.entities[0];
-}
diff --git a/adapters/api/get-downstream-assets.js b/adapters/api/get-downstream-assets.js
index 61dd0bf..8dec807 100644
--- a/adapters/api/get-downstream-assets.js
+++ b/adapters/api/get-downstream-assets.js
@@ -1,15 +1,14 @@
+import fetch from "node-fetch";
import {
- ATLAN_API_TOKEN,
- ATLAN_INSTANCE_URL,
-} from "../utils/get-environment-variables.js";
-import {
- getCertificationImage,
getConnectorImage,
+ getCertificationImage,
getImageURL,
} from "../utils/index.js";
-
-import fetch from "node-fetch";
import stringify from "json-stringify-safe";
+import {
+ ATLAN_INSTANCE_URL,
+ ATLAN_API_TOKEN,
+} from "../utils/get-environment-variables.js";
const ASSETS_LIMIT = 100;
@@ -26,103 +25,43 @@ export default async function getDownstreamAssets(
};
var raw = stringify({
- "guid": guid,
- "size": Math.max(Math.ceil(ASSETS_LIMIT / totalModifiedFiles), 1),
- "from": 0,
- "depth": 21,
- "direction": "OUTPUT",
- "entityFilters": {
- "condition": "AND",
- "criterion": [
- {
- "attributeName": "__state",
- "operator": "eq",
- "attributeValue": "ACTIVE"
- },
- {
- "attributeName": "__typeName",
- "operator": "neq",
- "attributeValue": "DbtProcess"
- },
- {
- "attributeName": "__typeName",
- "operator": "neq",
- "attributeValue": "DbtColumnProcess"
- },
- {
- "attributeName": "__typeName",
- "operator": "neq",
- "attributeValue": "DataEntityMappingProcess"
- },
- {
- "attributeName": "__typeName",
- "operator": "neq",
- "attributeValue": "DataAttributeMappingProcess"
- },
- {
- "attributeName": "__typeName",
- "operator": "neq",
- "attributeValue": "Process"
- },
- {
- "attributeName": "__typeName",
- "operator": "neq",
- "attributeValue": "ColumnProcess"
- },
- {
- "attributeName": "__typeName",
- "operator": "neq",
- "attributeValue": "BIProcess"
- },
- {
- "attributeName": "__typeName",
- "operator": "neq",
- "attributeValue": "FivetranProcess"
- },
- {
- "attributeName": "__typeName",
- "operator": "neq",
- "attributeValue": "FivetranColumnProcess"
- }
- ]
- },
- "entityTraversalFilters": {
- "condition": "AND",
- "criterion": [
- {
- "attributeName": "__state",
- "operator": "eq",
- "attributeValue": "ACTIVE"
- }
- ]
- },
- "relationshipTraversalFilters": {
- "condition": "AND",
- "criterion": [
- {
- "attributeName": "__state",
- "operator": "eq",
- "attributeValue": "ACTIVE"
- }
- ]
+ guid: guid,
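+      // Cap the page size so the total assets fetched across all modified files stays around ASSETS_LIMIT (at least 1 per file)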
+ size: Math.max(Math.ceil(ASSETS_LIMIT / totalModifiedFiles), 1),
+ from: 0,
+ depth: 21,
+ direction: "OUTPUT",
+ entityFilters: {
+ condition: "AND",
+ criterion: [
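+        // A single not_contains filter on "Process" replaces the previous per-type neq filters
+        // (DbtProcess, DbtColumnProcess, ColumnProcess, BIProcess, FivetranProcess, etc.)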
+ {
+ attributeName: "__typeName",
+ operator: "not_contains",
+ attributeValue: "Process",
+ },
+ {
+ attributeName: "__state",
+ operator: "eq",
+ attributeValue: "ACTIVE",
+ },
+ ],
},
- "attributes": [
- "name",
- "description",
- "userDescription",
- "sourceURL",
- "qualifiedName",
- "connectorName",
- "certificateStatus",
- "certificateUpdatedBy",
- "certificateUpdatedAt",
- "ownerUsers",
- "ownerGroups",
- "classificationNames",
- "meanings"
+ attributes: [
+ "name",
+ "description",
+ "userDescription",
+ "sourceURL",
+ "qualifiedName",
+ "connectorName",
+ "certificateStatus",
+ "certificateUpdatedBy",
+ "certificateUpdatedAt",
+ "ownerUsers",
+ "ownerGroups",
+ "classificationNames",
+ "meanings",
],
- "excludeMeanings": false,
- "excludeClassifications": false
+ excludeMeanings: false,
+ excludeClassifications: false,
});
var requestOptions = {
@@ -132,26 +71,25 @@ export default async function getDownstreamAssets(
};
var handleError = (err) => {
- const comment = `
- ### ${getConnectorImage(asset.attributes.connectorName
- )} [${asset.displayText}](${ATLAN_INSTANCE_URL}/assets/${
- asset.guid
- }/overview?utm_source=dbt_${integration}_action) ${
- asset.attributes?.certificateStatus
- ? getCertificationImage(asset.attributes.certificateStatus)
- : ""
- }
-
- _Failed to fetch impacted assets._
-
- ${getImageURL(
- "atlan-logo",
- 15,
- 15
- )} [View lineage in Atlan](${ATLAN_INSTANCE_URL}/assets/${
- asset.guid
- }/lineage/overview?utm_source=dbt_${integration}_action)
- `;
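+    // Markdown lines in the template are kept flush-left so the rendered PR comment has no stray indentation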
+ const comment = `### ${getConnectorImage(
+ asset.attributes.connectorName
+ )} [${asset.displayText}](${ATLAN_INSTANCE_URL}/assets/${
+ asset.guid
+ }/overview?utm_source=dbt_${integration}_action) ${
+ asset.attributes?.certificateStatus
+ ? getCertificationImage(asset.attributes.certificateStatus)
+ : ""
+ }
+
+_Failed to fetch impacted assets._
+
+${getImageURL(
+ "atlan-logo",
+ 15,
+ 15
+)} [View lineage in Atlan](${ATLAN_INSTANCE_URL}/assets/${
+ asset.guid
+ }/lineage/overview?utm_source=dbt_${integration}_action)`;
sendSegmentEventOfIntegration({
action: "dbt_ci_action_failure",
@@ -184,180 +122,6 @@ export default async function getDownstreamAssets(
};
});
if (response.error) return response;
-
- const modifiedEntities = response.entities.filter(item => item.guid !== guid)
-
- return {...response, entities: modifiedEntities}
-}
-
-function contructCommentForDownstreamLineageFetchError({
- asset,
- utmSource
-}){
- const comment = `
- ### ${getConnectorImage(asset.attributes.connectorName
- )} [${asset.displayText}](${ATLAN_INSTANCE_URL}/assets/${
- asset.guid
- }/overview?utm_source=${utmSource}) ${
- asset.attributes?.certificateStatus
- ? getCertificationImage(asset.attributes.certificateStatus)
- : ""
- }
-
- _Failed to fetch impacted assets._
-
- ${getImageURL(
- "atlan-logo",
- 15,
- 15
- )} [View lineage in Atlan](${ATLAN_INSTANCE_URL}/assets/${
- asset.guid
- }/lineage/overview?utm_source=${utmSource})
- `;
-
- return comment;
-}
-
-export async function getDownstreamLineageForAssets({
- asset,
- guid,
- totalModifiedFiles,
- utmSource
-}) {
- var myHeaders = {
- authorization: `Bearer ${ATLAN_API_TOKEN}`,
- "content-type": "application/json",
- };
-
- var raw = stringify({
- "guid": guid,
- "size": Math.max(Math.ceil(ASSETS_LIMIT / totalModifiedFiles), 1),
- "from": 0,
- "depth": 21,
- "direction": "OUTPUT",
- "entityFilters": {
- "condition": "AND",
- "criterion": [
- {
- "attributeName": "__state",
- "operator": "eq",
- "attributeValue": "ACTIVE"
- },
- {
- "attributeName": "__typeName",
- "operator": "neq",
- "attributeValue": "DbtProcess"
- },
- {
- "attributeName": "__typeName",
- "operator": "neq",
- "attributeValue": "DbtColumnProcess"
- },
- {
- "attributeName": "__typeName",
- "operator": "neq",
- "attributeValue": "DataEntityMappingProcess"
- },
- {
- "attributeName": "__typeName",
- "operator": "neq",
- "attributeValue": "DataAttributeMappingProcess"
- },
- {
- "attributeName": "__typeName",
- "operator": "neq",
- "attributeValue": "Process"
- },
- {
- "attributeName": "__typeName",
- "operator": "neq",
- "attributeValue": "ColumnProcess"
- },
- {
- "attributeName": "__typeName",
- "operator": "neq",
- "attributeValue": "BIProcess"
- },
- {
- "attributeName": "__typeName",
- "operator": "neq",
- "attributeValue": "FivetranProcess"
- },
- {
- "attributeName": "__typeName",
- "operator": "neq",
- "attributeValue": "FivetranColumnProcess"
- }
- ]
- },
- "entityTraversalFilters": {
- "condition": "AND",
- "criterion": [
- {
- "attributeName": "__state",
- "operator": "eq",
- "attributeValue": "ACTIVE"
- }
- ]
- },
- "relationshipTraversalFilters": {
- "condition": "AND",
- "criterion": [
- {
- "attributeName": "__state",
- "operator": "eq",
- "attributeValue": "ACTIVE"
- }
- ]
- },
- "attributes": [
- "name",
- "description",
- "userDescription",
- "sourceURL",
- "qualifiedName",
- "connectorName",
- "certificateStatus",
- "certificateUpdatedBy",
- "certificateUpdatedAt",
- "ownerUsers",
- "ownerGroups",
- "classificationNames",
- "meanings"
- ],
- "excludeMeanings": false,
- "excludeClassifications": false
- });
-
- var requestOptions = {
- method: "POST",
- headers: myHeaders,
- body: raw,
- };
-
- var response = await fetch(
- `${ATLAN_INSTANCE_URL}/api/meta/lineage/list`,
- requestOptions
- )
- .then((e) => {
- if (e.status === 200) {
- return e.json();
- } else {
- throw e;
- }
- })
- .catch((err) => {
- return {
- error: err,
- comment: contructCommentForDownstreamLineageFetchError({asset, utmSource}),
- };
- });
- if (response.error) return {
- error: err,
- comment: contructCommentForDownstreamLineageFetchError({asset, utmSource}),
- };
-
- const modifiedEntities = response.entities.filter(item => item.guid !== guid)
- return {...response, entities: modifiedEntities}
+ return response;
}
diff --git a/adapters/index.js b/adapters/index.js
index 5ec7530..16d2e29 100644
--- a/adapters/index.js
+++ b/adapters/index.js
@@ -1,17 +1,14 @@
+// main.js
+import { runAction } from "./gateway.js";
+import GitHubIntegration from "./integrations/github-integration.js";
+import GitLabIntegration from "./integrations/gitlab-integration.js";
import {
- GITHUB_TOKEN,
GITLAB_TOKEN,
+ GITHUB_TOKEN,
} from "./utils/get-environment-variables.js";
-import ContractIntegration from "./integrations/atlan-contract-impact-analysis-github.js";
-import GitHubIntegration from "./integrations/github-integration.js";
-import GitLabIntegration from "./integrations/gitlab-integration.js";
-// main.js
-import { runAction } from "./gateway.js";
-
async function run() {
//Add new integrations over here
- await runAction(GITHUB_TOKEN, ContractIntegration);
await runAction(GITHUB_TOKEN, GitHubIntegration);
await runAction(GITLAB_TOKEN, GitLabIntegration);
}
diff --git a/adapters/integrations/atlan-contract-impact-analysis-github.js b/adapters/integrations/atlan-contract-impact-analysis-github.js
deleted file mode 100644
index b69e906..0000000
--- a/adapters/integrations/atlan-contract-impact-analysis-github.js
+++ /dev/null
@@ -1,873 +0,0 @@
-import * as fs from 'fs';
-
-import {
- ATLAN_CONFIG,
- ATLAN_INSTANCE_URL,
- IS_DEV,
-} from "../utils/get-environment-variables.js";
-import {
- auth,
- getCertificationImage,
- getConnectorImage,
- truncate,
-} from "../utils/index.js";
-import {
- getContractAssetInfo,
- getDownstreamTable,
- getErrorResponseStatus401,
- getErrorResponseStatusUndefined,
- getViewAssetButton,
-} from "../templates/github-integration.js";
-
-import IntegrationInterface from "./contract/contract.js";
-import getAssetClassifications from "../api/get-asset-classifications.js"
-import getContractAsset from "../api/get-contract-asset.js"
-import { getContractImpactAnalysisBaseComment } from "../templates/atlan.js";
-import getDownstreamLineageForAssets from "../api/get-downstream-assets.js"
-import github from "@actions/github";
-import logger from "../logger/logger.js";
-import {
- sendSegmentEvent,
-} from "../api/index.js";
-import stringify from "json-stringify-safe";
-import yaml from 'js-yaml';
-
-var headSHA;
-const integrationName = "GITHUB_CONTRACT_IMPACT_ANALYSIS";
-const actionName = "contract_ci_action"
-const utmSource = "dbt_github_action"
-
-export default class ContractIntegration extends IntegrationInterface {
- constructor(token) {
- super(token);
- }
-
- async run() {
- try {
- const timeStart = Date.now();
- const { context } = github;
-
- const octokit = github.getOctokit(this.token);
- const { pull_request } = context?.payload;
- const { state, merged } = pull_request;
- headSHA = pull_request?.head?.sha;
-
- logger.withInfo(
- "GITHUB_CONTRACT_IMPACT_ANALYSIS is running...",
- integrationName,
- headSHA,
- "run"
- );
-
-
- if (!(await this.authIntegration({ octokit, context }))) {
- logger.withError(
- "Authentication failed. Wrong API Token.",
- integrationName,
- headSHA,
- "run"
- );
- throw { message: "Wrong API Token" };
- }
-
- let total_assets = 0;
-
- if (state === "open") {
- total_assets = await this.printDownstreamAssets({ octokit, context });
- }
-
- if (total_assets !== 0) {
- await this.sendSegmentEventOfIntegration({
- action: `${actionName}_run`,
- properties: {
- asset_count: total_assets,
- total_time: Date.now() - timeStart,
- },
- });
- }
-
- logger.withInfo(
- "Successfully Completed GITHUB_CONTRACT_IMPACT_ANALYSIS",
- integrationName,
- headSHA,
- "run"
- );
- } catch (error) {
- logger.withError(
- `Error in run(): ${error.message}`,
- integrationName,
- headSHA,
- "run"
- );
- throw error;
- }
- }
-
- async printDownstreamAssets({ octokit, context }) {
- logger.withInfo(
- "Printing downstream assets...",
- integrationName,
- headSHA,
- "printDownstreamAssets"
- );
-
- try {
- const changedFiles = await this.getChangedFiles({ octokit, context });
- let comments = ``;
- let warningComments = ``;
- let totalChangedFiles = 0;
-
- const atlanConfig = ATLAN_CONFIG;
-
- // Read atlan config file
- const config = this.readYamlFile(atlanConfig);
- if (config.error) {
- logger.withError(
- `Failed to read atlan config file ${atlanConfig}: ${config.error}`,
- integrationName,
- headSHA,
- "printDownstreamAssets"
- );
- return;
- }
-
- let datasources = this.parseDatasourceFromConfig(config.contentYaml)
-
- // If no datasources found, do not proceed
- if (datasources.size <= 0) {
- logger.withError(
- `No datasources found in atlan config ${atlanConfig}`,
- integrationName,
- headSHA,
- "printDownstreamAssets"
- );
- return;
- }
-
- for (const { fileName, filePath, status } of changedFiles) {
- // Skipping non yaml files
- if (!filePath.endsWith('.yaml') && !filePath.endsWith('.yml')) {
- logger.withInfo(
- `Skipping file: ${filePath}`,
- integrationName,
- headSHA,
- "printDownstreamAssets"
- );
- continue
- }
-
- logger.withInfo(
- `Processing file: ${filePath}`,
- integrationName,
- headSHA,
- "printDownstreamAssets"
- );
-
- const contract = this.readYamlFile(filePath);
- if (contract.error) {
- logger.withError(
- `Failed to read yaml file ${filePath}: ${contract.error}`,
- integrationName,
- headSHA,
- "printDownstreamAssets"
- );
- continue
- }
-
- let dataset = contract.contentYaml.dataset
- // Skip non contract yaml file
- if (!dataset) {
- continue
- }
-
- const assetQualifiedName = this.getQualifiedName(
- datasources,
- contract.contentYaml
- );
-
- if (assetQualifiedName === undefined) {
- logger.withError(
- `Failed to construct asset qualified name for contract ${filePath}`,
- integrationName,
- headSHA,
- "printDownstreamAssets"
- );
- continue;
- }
-
- logger.withInfo(
- `Generated asset qualified name ${assetQualifiedName} for contract ${filePath}`,
- integrationName,
- headSHA,
- "printDownstreamAssets"
- );
-
- // Fetch asset from Atlan
- const asset = await getContractAsset({
- dataset,
- assetQualifiedName: assetQualifiedName
- });
-
- if (asset.error) {
- logger.withError(
- `Assets fetch error for ${dataset}: ${asset.error}`,
- integrationName,
- headSHA,
- "printDownstreamAssets"
- );
-
- this.sendSegmentEventOfIntegration({
- action: `${actionName}_failure`,
- properties: {
- reason: "failed_to_get_asset",
- asset_name: dataset,
- msg: asset.error,
- },
- });
-
- totalChangedFiles++
- warningComments += asset.comment;
- warningComments += "\n\n---\n\n"
- continue;
- }
-
- logger.withInfo(
- `Processing asset: ${dataset}`,
- integrationName,
- headSHA,
- "printDownstreamAssets"
- );
-
- const timeStart = Date.now();
- const totalModifiedFiles = changedFiles.filter(
- (i) => i.status === "modified"
- ).length;
-
- // Fetch downstream assets
- const downstreamAssets = await getDownstreamLineageForAssets(
- asset,
- asset.guid,
- totalModifiedFiles,
- utmSource
- );
-
- if (downstreamAssets.error) {
- logger.withError(
- `Downstream assets error for ${dataset}: ${downstreamAssets.error}`,
- integrationName,
- headSHA,
- "printDownstreamAssets"
- );
-
- this.sendSegmentEventOfIntegration({
- action: `${actionName}_failure`,
- properties: {
- reason: "failed_to_fetch_lineage",
- asset_guid: asset.guid,
- asset_name: asset.name,
- asset_typeName: asset.typeName,
- msg: downstreamAssets.error,
- },
- });
-
- totalChangedFiles++
- warningComments += downstreamAssets.comment;
- warningComments += "\n\n---\n\n"
- continue;
- }
-
- // Send segment event for successful downstream asset fetch
- this.sendSegmentEventOfIntegration({
- action: `${actionName}_downstream_unfurl`,
- properties: {
- asset_guid: asset.guid,
- asset_type: asset.typeName,
- downstream_count: downstreamAssets.entities.length,
- total_fetch_time: Date.now() - timeStart,
- },
- });
-
- // Fetch classification for asset
- const classifications = await getAssetClassifications()
-
- if (classifications.error) {
- logger.withError(
- `Failed to fetch cllassification for ${assetObj["name"]}: ${classifications.error}`,
- integrationName,
- headSHA,
- "printDownstreamAssets"
- );
-
- this.sendSegmentEventOfIntegration({
- action: `${actionName}_failure`,
- properties: {
- reason: "failed_to_get_classifications",
- msg: classifications.error,
- },
- });
- }
-
- // Construct comment for displaying downstream assets
- const comment = await this.renderDownstreamAssetsComment({
- asset,
- downstreamAssets,
- classifications,
- });
-
- comments += comment;
-
- if (comment.trim() !== "") {
- comments += "\n\n---\n\n";
- }
-
- totalChangedFiles++;
- }
-
- // Add header comment before asset info comments
- comments = getContractImpactAnalysisBaseComment(
- totalChangedFiles,
- comments,
- warningComments
- );
-
- const existingComment = await this.checkCommentExists({
- octokit,
- context,
- });
-
- logger.withInfo(
- `Existing Comment: ${existingComment?.id}`,
- integrationName,
- headSHA,
- "printDownstreamAssets"
- );
-
- if (totalChangedFiles > 0)
- await this.createIssueComment({
- octokit,
- context,
- content: comments,
- comment_id: existingComment?.id,
- });
-
- if (totalChangedFiles === 0 && existingComment)
- await this.deleteComment({
- octokit,
- context,
- comment_id: existingComment?.id,
- });
-
- logger.withInfo(
- "Successfully printed Downstream Assets",
- integrationName,
- headSHA,
- "printDownstreamAssets"
- );
-
- return totalChangedFiles;
- } catch (error) {
- logger.withError(
- `Error in printDownstreamAssets: ${error.message}`,
- integrationName,
- headSHA,
- "printDownstreamAssets"
- );
- throw error;
- }
- }
-
- async authIntegration({ octokit, context }) {
- logger.withInfo(
- "Authenticating with Atlan",
- integrationName,
- headSHA,
- "authIntegration"
- );
-
- try {
- const response = await auth();
-
- const existingComment = await this.checkCommentExists({
- octokit,
- context,
- });
-
- logger.withInfo(
- `Existing Comment: ${existingComment?.id}`,
- integrationName,
- headSHA,
- "authIntegration"
- );
-
- if (response?.status === 401) {
- logger.withError(
- "Authentication failed: Status 401",
- integrationName,
- headSHA,
- "authIntegration"
- );
- await this.createIssueComment({
- octokit,
- context,
- content: getErrorResponseStatus401(ATLAN_INSTANCE_URL, context),
- comment_id: existingComment?.id,
- });
- return false;
- }
-
- if (response === undefined) {
- logger.withError(
- "Authentication failed: Undefined response",
- integrationName,
- headSHA,
- "authIntegration"
- );
- await this.createIssueComment({
- octokit,
- context,
- content: getErrorResponseStatusUndefined(ATLAN_INSTANCE_URL, context),
- comment_id: existingComment?.id,
- });
- return false;
- }
- logger.withInfo(
- "Successfully Authenticated with Atlan",
- integrationName,
- headSHA,
- "authIntegration"
- );
- return true;
- } catch (error) {
- logger.withError(
- `Error in authIntegration: ${error.message}`,
- integrationName,
- headSHA,
- "authIntegration"
- );
- throw error;
- }
- }
-
- async sendSegmentEventOfIntegration({ action, properties }) {
- try {
- const domain = new URL(ATLAN_INSTANCE_URL).hostname;
- const { context } = github; //confirm this
- logger.withInfo(
- `Sending Segment event for action: ${action}`,
- integrationName,
- headSHA,
- "sendSegmentEventOfIntegration"
- );
-
- const raw = stringify({
- category: "integration",
- object: "github",
- action,
- userId: "atlan-annonymous-github",
- properties: {
- ...properties,
- github_action_id: `https://github.com/${context?.payload?.repository?.full_name}/actions/runs/${context?.runId}`,
- domain,
- },
- });
-
- return sendSegmentEvent(action, raw);
- } catch (error) {
- logger.withError(
- `Error sending Segment event for action: ${action} - ${error.message}`,
- integrationName,
- headSHA,
- "sendSegmentEventOfIntegration"
- );
- throw error;
- }
- }
-
- async getChangedFiles({ octokit, context }) {
- try {
- logger.withInfo(
- "Fetching changed files...",
- integrationName,
- headSHA,
- "getChangedFiles"
- );
-
- const { repository, pull_request } = context.payload,
- owner = repository.owner.login,
- repo = repository.name,
- pull_number = pull_request.number;
-
- const res = await octokit.request(
- `GET /repos/${owner}/${repo}/pulls/${pull_number}/files`,
- {
- owner,
- repo,
- pull_number,
- }
- );
-
- var changedFiles = res.data
- .map(({ filename, status }) => {
- try {
- const isYamlFile = filename.match(/\.(yaml|yml)$/);
-
- if (isYamlFile) {
- const contractName = filename.split('/').pop().replace(/\.(yaml|yml)$/, '');
- return {
- fileName: contractName,
- filePath: filename,
- status,
- };
- }
- } catch (e) {
- logger.withError(
- `Error processing file: ${filename} - ${e.message}`,
- integrationName,
- headSHA,
- "getChangedFiles"
- );
- }
- })
- .filter((i) => i !== undefined);
-
- changedFiles = changedFiles.filter((item, index) => {
- return (
- changedFiles.findIndex((obj) => obj.fileName === item.fileName) ===
- index
- );
- });
-
- logger.withInfo(
- "Successfully fetched changed files",
- integrationName,
- headSHA,
- "getChangedFiles"
- );
-
- return changedFiles;
- } catch (error) {
- logger.withError(
- `Error fetching changed files - ${error.message}`,
- integrationName,
- headSHA,
- "getChangedFiles"
- );
- throw error;
- }
- }
-
- async checkCommentExists({ octokit, context }) {
- logger.withInfo(
- "Checking for existing comments...",
- integrationName,
- headSHA,
- "checkCommentExists"
- );
-
- if (IS_DEV) {
- logger.withInfo(
- "Development mode enabled. Skipping comment check.",
- integrationName,
- headSHA,
- "checkCommentExists"
- );
- return null;
- }
-
- const { pull_request } = context.payload;
-
- try {
- const comments = await octokit.rest.issues.listComments({
- ...context.repo,
- issue_number: pull_request.number,
- });
-
- const existingComment = comments.data.find(
- (comment) =>
- comment.user.login === "github-actions[bot]" &&
- comment.body.includes(
- ""
- )
- );
- if (existingComment) {
- logger.withInfo(
- "Found existing comment: " + existingComment?.id,
- integrationName,
- headSHA,
- "checkCommentExists"
- );
- } else {
- logger.withInfo(
- "No existing comment found",
- integrationName,
- headSHA,
- "checkCommentExists"
- );
- }
-
- return existingComment;
- } catch (error) {
- logger.withError(
- "Error checking for existing comments: " + error.message,
- integrationName,
- headSHA,
- "checkCommentExists"
- );
- throw error;
- }
- }
-
- async createIssueComment({
- octokit,
- context,
- content,
- comment_id = null,
- forceNewComment = false,
- }) {
- logger.withInfo(
- "Creating an issue comment...",
- integrationName,
- headSHA,
- "createIssueComment"
- );
-
- const { pull_request } = context?.payload || {};
-
- content = `
-${content}`;
-
- const commentObj = {
- ...context.repo,
- issue_number: pull_request.number,
- body: content,
- };
-
- if (IS_DEV) {
- logger.withInfo(
- "Development mode enabled. Skipping comment creation.",
- integrationName,
- headSHA,
- "createIssueComment"
- );
-
- return content;
- }
-
- if (comment_id && !forceNewComment)
- return octokit.rest.issues.updateComment({ ...commentObj, comment_id });
- return octokit.rest.issues.createComment(commentObj);
- }
-
- async deleteComment({ octokit, context, comment_id }) {
- logger.withInfo(
- `Deleted comment with ID ${comment_id}`,
- integrationName,
- headSHA,
- "deleteComment"
- );
-
- const { pull_request } = context.payload;
-
- return octokit.rest.issues.deleteComment({
- ...context.repo,
- issue_number: pull_request.number,
- comment_id,
- });
- }
-
- async renderDownstreamAssetsComment({
- asset,
- downstreamAssets,
- classifications,
- }) {
- logger.withInfo(
- "Rendering Downstream Assets...",
- integrationName,
- headSHA,
- "renderDownstreamAssetsComment"
- );
- try {
- let impactedData = downstreamAssets.entities.map(
- ({
- displayText,
- guid,
- typeName,
- attributes,
- meanings,
- classificationNames,
- }) => {
- // Modifying the typeName and getting the readableTypeName
- let readableTypeName = typeName
- .toLowerCase()
- .replace(attributes.connectorName, "")
- .toUpperCase();
-
- // Filtering classifications based on classificationNames
- let classificationsObj = classifications.filter(({ name }) =>
- classificationNames.includes(name)
- );
-
- // Modifying the readableTypeName
- readableTypeName =
- readableTypeName.charAt(0).toUpperCase() +
- readableTypeName.slice(1).toLowerCase();
-
- return [
- guid,
- truncate(displayText),
- truncate(attributes.connectorName),
- truncate(readableTypeName),
- truncate(
- attributes?.userDescription || attributes?.description || ""
- ),
- attributes?.certificateStatus || "",
- truncate(
- [...attributes?.ownerUsers, ...attributes?.ownerGroups] || []
- ),
- truncate(
- meanings.map(
- ({ displayText, termGuid }) =>
- `[${displayText}](${ATLAN_INSTANCE_URL}/assets/${termGuid}/overview?utm_source=dbt_github_action)`
- )
- ),
- truncate(
- classificationsObj?.map(
- ({ name, displayName }) => `\`${displayName}\``
- )
- ),
- attributes?.sourceURL || "",
- ];
- }
- );
-
- // Sorting the impactedData first by typeName and then by connectorName
- impactedData = impactedData.sort((a, b) => a[3].localeCompare(b[3]));
- impactedData = impactedData.sort((a, b) => a[2].localeCompare(b[2]));
-
- // Creating rows for the downstream table
- let rows = impactedData.map(
- ([
- guid,
- displayText,
- connectorName,
- typeName,
- description,
- certificateStatus,
- owners,
- meanings,
- classifications,
- sourceUrl,
- ]) => {
- // Getting connector and certification images
- const connectorImage = getConnectorImage(connectorName);
- const certificationImage = certificateStatus
- ? getCertificationImage(certificateStatus)
- : "";
-
- return [
- `${connectorImage} [${displayText}](${ATLAN_INSTANCE_URL}/assets/${guid}/overview?utm_source=dbt_github_action) ${certificationImage}`,
- `\`${typeName}\``,
- description,
- owners,
- meanings,
- classifications,
- sourceUrl ? `[Open in ${connectorName}](${sourceUrl})` : " ",
- ];
- }
- );
-
- const assetInfo = getContractAssetInfo(
- ATLAN_INSTANCE_URL,
- asset
- );
-
- // Generating the downstream table
- const downstreamTable = getDownstreamTable(
- ATLAN_INSTANCE_URL,
- downstreamAssets,
- rows,
- asset
- );
-
- // Generating the "View asset in Atlan" button
- const viewAssetButton = getViewAssetButton(ATLAN_INSTANCE_URL, asset);
-
- // Generating the final comment based on the presence of downstream assets
- if (downstreamAssets.entityCount > 0) {
- return `${assetInfo}
-
-${downstreamTable}
-
-${viewAssetButton}`;
- } else {
- return `${assetInfo}
-
-No downstream assets found.
-
-${viewAssetButton}`;
- }
- } catch (error) {
- logger.withError(
- `Error rendering Downstream Assets: ${error.message}`,
- integrationName,
- headSHA,
- "renderDownstreamAssetsComment"
- );
- throw error;
- }
- }
-
- readYamlFile(filePath) {
- try {
- // Read file content synchronously
- const data = fs.readFileSync(filePath, 'utf8');
-
- // Parse the YAML data
- const parsedData = yaml.load(data);
-
- // Return parsed data
- return {
- contentString: data,
- contentYaml: parsedData
- };
- } catch (err) {
- return {
- error: err
- };
- }
- }
-
- parseDatasourceFromConfig(configYaml) {
- // Create a Map for keys starting with "data_source "
- const dataSourceMap = new Map();
-
- // Iterate through the object to find relevant keys
- for (const [key, value] of Object.entries(configYaml)) {
- if (key.startsWith('data_source ')) {
- // Trim the prefix and add to the Map
- const trimmedKey = key.replace('data_source ', '');
- dataSourceMap.set(trimmedKey, value);
- }
- }
-
- return dataSourceMap;
- }
-
- getQualifiedName(datasources, contractYaml) {
- if (contractYaml["data_source"] === undefined) {
- return;
- }
-
- if (!datasources.has(contractYaml.data_source)) {
- return;
- }
-
- let datasource = datasources.get(contractYaml.data_source)
- const qualifiedName = datasource?.connection?.qualified_name || '';
- const database = datasource?.database || '';
- const schema = datasource?.schema || '';
- // Format the output
- const assetQualifiedName = `${qualifiedName}/${database}/${schema}/${contractYaml.dataset}`;
- return assetQualifiedName;
- }
-}
diff --git a/adapters/templates/atlan.js b/adapters/templates/atlan.js
index 93227b1..d84e475 100644
--- a/adapters/templates/atlan.js
+++ b/adapters/templates/atlan.js
@@ -6,15 +6,6 @@ export function getErrorModelNotFound(name) {
`;
}
-export function getErrorAssetNotFound(name) {
- return `### Asset: **${name}**
- :warning: It seems that the underlying asset you were working with could not be found on Atlan. This could mean the asset is not synced or is currently unavailable.
- To address this:
- • Check asset sync: Ensure that the relevant assets are catalogued in Atlan.
- • Review asset source: Double-check the source database or data pipeline to ensure all upstream data is flowing correctly.
- `;
-}
-
export function getErrorDoesNotMaterialize(
name,
ATLAN_INSTANCE_URL,
@@ -38,20 +29,4 @@ export function getBaseComment(totalChangedFiles, comments) {
}** you have edited.
${comments}`
-}
-
-export function getContractImpactAnalysisBaseComment(
- totalChangedFiles,
- comments,
- warningComments
-) {
- return `### ${getImageURL("atlan-logo", 15, 15)} Atlan impact analysis
- We've detected changes in **${totalChangedFiles} ${
- totalChangedFiles > 1 ? "contracts" : "contract"
- }** that you've edited. Below is the downstream impact analysis of these changes.
-
- ${comments}
-
- ${warningComments}
- `
}
\ No newline at end of file
diff --git a/adapters/templates/github-integration.js b/adapters/templates/github-integration.js
index e83eb76..c692f80 100644
--- a/adapters/templates/github-integration.js
+++ b/adapters/templates/github-integration.js
@@ -1,4 +1,4 @@
-import { getCertificationImage, getConnectorImage, getImageURL } from "../utils/index.js";
+import { getImageURL, getConnectorImage, getCertificationImage } from "../utils/index.js";
export function getErrorResponseStatus401 (ATLAN_INSTANCE_URL, context) {
return `We couldn't connect to your Atlan Instance, please make sure to set the valid Atlan Bearer Token as \`ATLAN_API_TOKEN\` as this repository's action secret.
@@ -55,18 +55,6 @@ export function getAssetInfo(ATLAN_INSTANCE_URL, asset, materialisedAsset, envir
}`
}
-export function getContractAssetInfo(ATLAN_INSTANCE_URL, asset) {
- return `### ${getConnectorImage(
- asset.attributes.connectorName
- )} [${asset.displayText}](${ATLAN_INSTANCE_URL}/assets/${
- asset.guid
- }/overview?utm_source=dbt_github_action) ${
- asset.attributes?.certificateStatus
- ? getCertificationImage(asset.attributes.certificateStatus)
- : ""
- }`
-}
-
export function getDownstreamTable(ATLAN_INSTANCE_URL, downstreamAssets, rows, materialisedAsset) {
return `${
downstreamAssets.entityCount
diff --git a/adapters/utils/get-environment-variables.js b/adapters/utils/get-environment-variables.js
index 98653ae..30416a3 100644
--- a/adapters/utils/get-environment-variables.js
+++ b/adapters/utils/get-environment-variables.js
@@ -1,5 +1,5 @@
-import core from "@actions/core";
import dotenv from "dotenv";
+import core from "@actions/core";
dotenv.config();
//Common env variables
@@ -16,9 +16,6 @@ export const IGNORE_MODEL_ALIAS_MATCHING =
(process.env.IGNORE_MODEL_ALIAS_MATCHING ||
core.getInput("IGNORE_MODEL_ALIAS_MATCHING")) == "true";
-export const ATLAN_CONFIG =
- process.env.ATLAN_CONFIG || core.getInput("ATLAN_CONFIG");
-
//GITLAB SPECIFIC ENV VARIABLES
export async function getCIMergeRequestIID(
gitlab,
diff --git a/dist/index.js b/dist/index.js
index baa20cd..5a16acb 100644
--- a/dist/index.js
+++ b/dist/index.js
@@ -22387,104 +22387,149 @@ __nccwpck_require__.d(core_dist_namespaceObject, {
"Users": () => (Users)
});
-// EXTERNAL MODULE: ./node_modules/@actions/core/lib/core.js
-var core = __nccwpck_require__(2186);
-// EXTERNAL MODULE: ./node_modules/dotenv/lib/main.js
-var main = __nccwpck_require__(2437);
-;// CONCATENATED MODULE: ./adapters/utils/get-environment-variables.js
+;// CONCATENATED MODULE: ./adapters/logger/logger.js
+// logger.js
+function getCurrentTimestamp() {
+ const now = new Date();
+ return now.toISOString();
+}
-main.config();
+function logInfo(message, method) {
+ const timestamp = getCurrentTimestamp();
+ const logEntry = {
+    level: "INFO",
+ timestamp,
+ method,
+ message,
+ };
+  console.log(logEntry);
+}
-//Common env variables
-const get_environment_variables_ATLAN_INSTANCE_URL = new URL(
- process.env.ATLAN_INSTANCE_URL || core.getInput("ATLAN_INSTANCE_URL")
-).origin;
+function withInfo(message, vcs, sha, method) {
+ const timestamp = getCurrentTimestamp();
+ const logEntry = {
+ level: "INFO",
+ timestamp,
+ vcs,
+ sha,
+ method,
+ message,
+ };
+ console.log(logEntry);
+}
-const get_environment_variables_ATLAN_API_TOKEN =
- process.env.ATLAN_API_TOKEN || core.getInput("ATLAN_API_TOKEN");
+function withError(message, vcs, sha, method) {
+ const timestamp = getCurrentTimestamp();
+ const logEntry = {
+ level: "ERROR",
+ timestamp,
+ vcs,
+ sha,
+ method,
+ message,
+ };
+ console.error(logEntry);
+}
-const IS_DEV = process.env.IS_DEV;
+function debug(message, vcs, sha, method) {
+ const timestamp = getCurrentTimestamp();
+ const logEntry = {
+ level: "DEBUG",
+ timestamp,
+ vcs,
+ sha,
+ method,
+ message,
+ };
+ console.debug(logEntry);
+}
-const IGNORE_MODEL_ALIAS_MATCHING =
- (process.env.IGNORE_MODEL_ALIAS_MATCHING ||
- core.getInput("IGNORE_MODEL_ALIAS_MATCHING")) == "true";
+const logger = {
+ withInfo,
+ withError,
+ debug,
+ logInfo,
+};
-const ATLAN_CONFIG =
- process.env.ATLAN_CONFIG || core.getInput("ATLAN_CONFIG");
+/* harmony default export */ const logger_logger = (logger);
-//GITLAB SPECIFIC ENV VARIABLES
-async function getCIMergeRequestIID(
- gitlab,
- CI_PROJECT_ID,
- CI_COMMIT_SHA
-) {
- if (!process.env.CI_MERGE_REQUEST_IID) {
- const mergeRequestCommit = await gitlab.Commits.allMergeRequests(
- CI_PROJECT_ID,
- CI_COMMIT_SHA
- );
+;// CONCATENATED MODULE: ./adapters/gateway.js
+// Common Gateway for all integrations
- const firstMergeRequest = mergeRequestCommit[0];
- if (firstMergeRequest) {
- return firstMergeRequest.iid;
- }
+async function runAction(token, integrationModule) {
+ if (token === undefined) {
+ logger_logger.logInfo("Token not provided.", "runAction");
+ return;
}
-
- return process.env.CI_MERGE_REQUEST_IID;
+ const integration = new integrationModule(token);
+ await integration.run();
}
-const {
- CI_PROJECT_PATH,
- CI_PROJECT_ID,
- CI_JOB_URL,
- GITLAB_TOKEN,
- CI_COMMIT_MESSAGE,
- GITLAB_USER_LOGIN,
- CI_PROJECT_NAME,
- CI_COMMIT_SHA,
- CI_PROJECT_NAMESPACE,
-} = process.env;
+;// CONCATENATED MODULE: ./adapters/integrations/contract/contract.js
+// Common interface that each new integration has to implement
+class IntegrationInterface {
+ constructor(token) {
+ this.token = token;
+ }
-function getGitLabEnvironments() {
- const { DBT_ENVIRONMENT_BRANCH_MAP } = process.env;
+ async run() {
+ throw new Error("Not Implemented");
+ }
- if (DBT_ENVIRONMENT_BRANCH_MAP) {
- const environmentLines = DBT_ENVIRONMENT_BRANCH_MAP.split("\n");
- const environmentMap = {};
+ async printDownstreamAssets(config) {
+ throw new Error("Not Implemented");
+ }
- environmentLines.forEach((line) => {
- const [environment, branch] = line.split(":").map((item) => item.trim());
- if (environment && branch) {
- environmentMap[environment] = branch;
- }
- });
+ async setResourceOnAsset(config) {
+ throw new Error("Not Implemented");
+ }
- return environmentMap;
- } else {
- return {};
+ async authIntegration(config) {
+ throw new Error("Not Implemented");
}
-}
-//GITHUB SPECIFIC ENV VARIABLES
-const GITHUB_TOKEN =
- core.getInput("GITHUB_TOKEN") || process.env.GITHUB_TOKEN;
+ async sendSegmentEventOfIntegration({ action, properties }) {
+ throw new Error("Not Implemented");
+ }
-const getEnvironments = () => {
- return (
- core.getInput("DBT_ENVIRONMENT_BRANCH_MAP")
- ?.trim()
- ?.split("\n")
- ?.map((i) => i.split(":").map((i) => i.trim())) ?? []
- );
-};
+ async getChangedFiles(config) {
+ throw new Error("Not Implemented");
+ }
+
+ async getAssetName(config) {
+ throw new Error("Not Implemented");
+ }
+
+ async getFileContents(config) {
+ throw new Error("Not Implemented");
+ }
+
+ async checkCommentExists(config) {
+ throw new Error("Not Implemented");
+ }
+
+ async createIssueComment(config) {
+ throw new Error("Not Implemented");
+ }
+
+ async deleteComment(config) {
+ throw new Error("Not Implemented");
+ }
+
+ async renderDownstreamAssetsComment() {
+ throw new Error("Not Implemented");
+ }
+}
-// EXTERNAL MODULE: external "fs"
-var external_fs_ = __nccwpck_require__(7147);
+// EXTERNAL MODULE: ./node_modules/@actions/github/lib/github.js
+var github = __nccwpck_require__(5438);
+// EXTERNAL MODULE: ./node_modules/json-stringify-safe/stringify.js
+var stringify = __nccwpck_require__(7073);
;// CONCATENATED MODULE: ./adapters/utils/get-image-url.js
-function get_image_url_getImageURL(name, height = 20, width = 20) {
+function getImageURL(name, height = 20, width = 20) {
try {
return ``;
} catch (e) {
@@ -22493,12 +22538,12 @@ function get_image_url_getImageURL(name, height = 20, width = 20) {
}
}
-function get_image_url_getConnectorImage(connectorName) {
- return get_image_url_getImageURL(`connector-${connectorName.toLowerCase()}`, 15, 15);
+function getConnectorImage(connectorName) {
+ return getImageURL(`connector-${connectorName.toLowerCase()}`, 15, 15);
}
-function get_image_url_getCertificationImage(certificationStatus) {
- return get_image_url_getImageURL(`certification-${certificationStatus.toLowerCase()}`, 15, 15);
+function getCertificationImage(certificationStatus) {
+ return getImageURL(`certification-${certificationStatus.toLowerCase()}`, 15, 15);
}
;// CONCATENATED MODULE: ./adapters/utils/hosted-images.js
@@ -24793,509 +24838,183 @@ function fixResponseChunkedTransferBadEnding(request, errorCallback) {
});
}
-;// CONCATENATED MODULE: ./adapters/utils/auth.js
+// EXTERNAL MODULE: ./node_modules/dotenv/lib/main.js
+var main = __nccwpck_require__(2437);
+// EXTERNAL MODULE: ./node_modules/@actions/core/lib/core.js
+var core = __nccwpck_require__(2186);
+;// CONCATENATED MODULE: ./adapters/utils/get-environment-variables.js
+main.config();
-async function auth() {
- var myHeaders = {
- authorization: `Bearer ${get_environment_variables_ATLAN_API_TOKEN}`,
- "content-type": "application/json",
- };
+//Common env variables
+const ATLAN_INSTANCE_URL = new URL(
+ process.env.ATLAN_INSTANCE_URL || core.getInput("ATLAN_INSTANCE_URL")
+).origin;
- var requestOptions = {
- method: "POST",
- headers: myHeaders,
- };
+const ATLAN_API_TOKEN =
+ process.env.ATLAN_API_TOKEN || core.getInput("ATLAN_API_TOKEN");
- var response = await src_fetch(
- `${get_environment_variables_ATLAN_INSTANCE_URL}/api/meta`,
- requestOptions
- ).catch((err) => {});
+const IS_DEV = process.env.IS_DEV;
- return response;
-}
+const IGNORE_MODEL_ALIAS_MATCHING =
+ (process.env.IGNORE_MODEL_ALIAS_MATCHING ||
+ core.getInput("IGNORE_MODEL_ALIAS_MATCHING")) == "true";
-;// CONCATENATED MODULE: ./adapters/utils/index.js
+//GITLAB SPECIFIC ENV VARIABLES
+async function getCIMergeRequestIID(
+ gitlab,
+ CI_PROJECT_ID,
+ CI_COMMIT_SHA
+) {
+ if (!process.env.CI_MERGE_REQUEST_IID) {
+ const mergeRequestCommit = await gitlab.Commits.allMergeRequests(
+ CI_PROJECT_ID,
+ CI_COMMIT_SHA
+ );
+ const firstMergeRequest = mergeRequestCommit[0];
+ if (firstMergeRequest) {
+ return firstMergeRequest.iid;
+ }
+ }
+ return process.env.CI_MERGE_REQUEST_IID;
+}
+const {
+ CI_PROJECT_PATH,
+ CI_PROJECT_ID,
+ CI_JOB_URL,
+ GITLAB_TOKEN,
+ CI_COMMIT_MESSAGE,
+ GITLAB_USER_LOGIN,
+ CI_PROJECT_NAME,
+ CI_COMMIT_SHA,
+ CI_PROJECT_NAMESPACE,
+} = process.env;
+function getGitLabEnvironments() {
+ const { DBT_ENVIRONMENT_BRANCH_MAP } = process.env;
+ if (DBT_ENVIRONMENT_BRANCH_MAP) {
+ const environmentLines = DBT_ENVIRONMENT_BRANCH_MAP.split("\n");
+ const environmentMap = {};
-;// CONCATENATED MODULE: ./adapters/templates/github-integration.js
+ environmentLines.forEach((line) => {
+ const [environment, branch] = line.split(":").map((item) => item.trim());
+ if (environment && branch) {
+ environmentMap[environment] = branch;
+ }
+ });
-
-function getErrorResponseStatus401 (ATLAN_INSTANCE_URL, context) {
- return `We couldn't connect to your Atlan Instance, please make sure to set the valid Atlan Bearer Token as \`ATLAN_API_TOKEN\` as this repository's action secret.
-
-Atlan Instance URL: ${ATLAN_INSTANCE_URL}
-
-Set your repository action secrets [here](https://github.com/${context.payload.repository.full_name}/settings/secrets/actions). For more information on how to setup the Atlan dbt Action, please read the [setup documentation here](https://github.com/atlanhq/dbt-action/blob/main/README.md).`
-}
-
-function getErrorResponseStatusUndefined(ATLAN_INSTANCE_URL, context) {
- return `We couldn't connect to your Atlan Instance, please make sure to set the valid Atlan Instance URL as \`ATLAN_INSTANCE_URL\` as this repository's action secret.
-
-Atlan Instance URL: ${ATLAN_INSTANCE_URL}
-
-Make sure your Atlan Instance URL is set in the following format.
-\`https://tenant.atlan.com\`
-
-Set your repository action secrets [here](https://github.com/${context.payload.repository.full_name}/settings/secrets/actions). For more information on how to setup the Atlan dbt Action, please read the [setup documentation here](https://github.com/atlanhq/dbt-action/blob/main/README.md).`
-}
-
-function getSetResourceOnAssetComment(tableMd, setResourceFailed) {
- return `## 🎊 Congrats on the merge!
-
- This pull request has been added as a resource to the following assets:
-
- ${setResourceFailed ? '> ⚠️ Seems like we were unable to set the resources for some of the assets due to insufficient permissions. To ensure that the pull request is linked as a resource, you will need to assign the right persona with requisite permissions to the API token.' : ''}
-
- Name | Resource set successfully
- --- | ---
- ${tableMd}
- `
-}
-
-function getAssetInfo(ATLAN_INSTANCE_URL, asset, materialisedAsset, environmentName, projectName) {
- return `### ${get_image_url_getConnectorImage(
- asset.attributes.connectorName
- )} [${asset.displayText}](${ATLAN_INSTANCE_URL}/assets/${
- asset.guid
- }/overview?utm_source=dbt_github_action) ${
- asset.attributes?.certificateStatus
- ? get_image_url_getCertificationImage(asset.attributes.certificateStatus)
- : ""
- }
- Materialised asset: ${get_image_url_getConnectorImage(
- materialisedAsset.attributes.connectorName
- )} [${materialisedAsset.attributes.name}](${ATLAN_INSTANCE_URL}/assets/${
- materialisedAsset.guid
- }/overview?utm_source=dbt_github_action) ${
- materialisedAsset.attributes?.certificateStatus
- ? get_image_url_getCertificationImage(materialisedAsset.attributes.certificateStatus)
- : ""
- }${environmentName ? ` | Environment Name: \`${environmentName}\`` : ""}${
- projectName ? ` | Project Name: \`${projectName}\`` : ""
- }`
-}
-
-function getContractAssetInfo(ATLAN_INSTANCE_URL, asset) {
- return `### ${get_image_url_getConnectorImage(
- asset.attributes.connectorName
- )} [${asset.displayText}](${ATLAN_INSTANCE_URL}/assets/${
- asset.guid
- }/overview?utm_source=dbt_github_action) ${
- asset.attributes?.certificateStatus
- ? get_image_url_getCertificationImage(asset.attributes.certificateStatus)
- : ""
- }`
-}
-
-function getDownstreamTable(ATLAN_INSTANCE_URL, downstreamAssets, rows, materialisedAsset) {
- return `${
- downstreamAssets.entityCount
- } downstream assets 👇
-
- Name | Type | Description | Owners | Terms | Classifications | Source URL
- --- | --- | --- | --- | --- | --- | ---
- ${rows
- .map((row) =>
- row.map((i) => i.replace(/\|/g, "•").replace(/\n/g, "")).join(" | ")
- )
- .join("\n")}
-
- ${
- downstreamAssets.hasMore
- ? `[See more downstream assets at Atlan](${ATLAN_INSTANCE_URL}/assets/${materialisedAsset.guid}/lineage?utm_source=dbt_github_action)`
- : ""
+ return environmentMap;
+ } else {
+ return {};
}
-
- `
-}
-
-function getViewAssetButton(ATLAN_INSTANCE_URL, asset) {
- return `${get_image_url_getImageURL(
- "atlan-logo",
- 15,
- 15
- )} [View asset in Atlan](${ATLAN_INSTANCE_URL}/assets/${
- asset.guid
- }/overview?utm_source=dbt_github_action)`
-}
-
-function getMDCommentForModel(ATLAN_INSTANCE_URL, model) {
- return `${get_image_url_getConnectorImage(model?.attributes?.connectorName)} [${
- model?.displayText
- }](${ATLAN_INSTANCE_URL}/assets/${model?.guid}/overview?utm_source=dbt_github_action)`
}
-function getMDCommentForMaterialisedView(ATLAN_INSTANCE_URL, materialisedView) {
- return `${get_image_url_getConnectorImage(materialisedView?.attributes?.connectorName)} [${
- materialisedView?.attributes?.name
- }](${ATLAN_INSTANCE_URL}/assets/${materialisedView?.guid}/overview?utm_source=dbt_github_action)`
-}
-
-function getTableMD(md, resp) {
- return `${md} | ${resp ? '✅' : '❌'} \n`
-}
-;// CONCATENATED MODULE: ./adapters/integrations/contract/contract.js
-// Common interface that each new integration has to implement
-class IntegrationInterface {
- constructor(token) {
- this.token = token;
- }
-
- async run() {
- throw new Error("Not Implemented");
- }
-
- async printDownstreamAssets(config) {
- throw new Error("Not Implemented");
- }
-
- async setResourceOnAsset(config) {
- throw new Error("Not Implemented");
- }
-
- async authIntegration(config) {
- throw new Error("Not Implemented");
- }
+//GITHUB SPECIFIC ENV VARIABLES
+const GITHUB_TOKEN =
+ core.getInput("GITHUB_TOKEN") || process.env.GITHUB_TOKEN;
- async sendSegmentEventOfIntegration({ action, properties }) {
- throw new Error("Not Implemented");
- }
+const getEnvironments = () => {
+ return (
+ core.getInput("DBT_ENVIRONMENT_BRANCH_MAP")
+ ?.trim()
+ ?.split("\n")
+ ?.map((i) => i.split(":").map((i) => i.trim())) ?? []
+ );
+};
- async getChangedFiles(config) {
- throw new Error("Not Implemented");
- }
+;// CONCATENATED MODULE: ./adapters/utils/auth.js
- async getAssetName(config) {
- throw new Error("Not Implemented");
- }
- async getFileContents(config) {
- throw new Error("Not Implemented");
- }
- async checkCommentExists(config) {
- throw new Error("Not Implemented");
- }
+async function auth() {
+ var myHeaders = {
+ authorization: `Bearer ${ATLAN_API_TOKEN}`,
+ "content-type": "application/json",
+ };
- async createIssueComment(config) {
- throw new Error("Not Implemented");
- }
+ var requestOptions = {
+ method: "POST",
+ headers: myHeaders,
+ };
- async deleteComment(config) {
- throw new Error("Not Implemented");
- }
+ var response = await src_fetch(
+ `${ATLAN_INSTANCE_URL}/api/meta`,
+ requestOptions
+ ).catch((err) => {});
- async renderDownstreamAssetsComment() {
- throw new Error("Not Implemented");
- }
+ return response;
}
-;// CONCATENATED MODULE: ./adapters/api/get-asset-classifications.js
+;// CONCATENATED MODULE: ./adapters/utils/index.js
-async function getAssetClassifications() {
- var myHeaders = {
- Authorization: `Bearer ${get_environment_variables_ATLAN_API_TOKEN}`,
- "Content-Type": "application/json",
- };
-
- var requestOptions = {
- method: "GET",
- headers: myHeaders,
- redirect: "follow",
- };
-
- var response = await src_fetch(
- `${get_environment_variables_ATLAN_INSTANCE_URL}/api/meta/types/typedefs?type=classification`,
- requestOptions
- )
- .then((e) => e.json())
- .catch((err) => {
- return {
- error: err
- }
- });
- if (response.error) return response
-
- return response?.classificationDefs;
- }
-;// CONCATENATED MODULE: ./adapters/templates/atlan.js
-function getErrorModelNotFound(name) {
- return `
-
❌ Model with name **${name}** could not be found or is deleted
- `;
-}
+;// CONCATENATED MODULE: ./adapters/api/get-downstream-assets.js
-function getErrorAssetNotFound(name) {
- return `### Asset: **${name}**
- :warning: It seems that the underlying asset you were working with could not be found on Atlan. This could mean the asset is not synced or is currently unavailable.
- To address this:
- • Check asset sync: Ensure that the relevant assets are catalogued in Atlan.
- • Review asset source: Double-check the source database or data pipeline to ensure all upstream data is flowing correctly.
- `;
-}
-function getErrorDoesNotMaterialize(
- name,
- ATLAN_INSTANCE_URL,
- response,
- integration
-) {
- return `
-
❌ Model with name [${name}](${ATLAN_INSTANCE_URL}/assets/${response.entities[0].guid}/overview?utm_source=dbt_${integration}_action) does not materialise any asset
`;
-}
-function getNewModelAddedComment(fileName) {
- return `### ${get_image_url_getConnectorImage("dbt")} ${fileName} 🆕
- Its a new model and not present in Atlan yet, you'll see the downstream impact for it after its present in Atlan.`
-}
-function getBaseComment(totalChangedFiles, comments) {
- return `### ${get_image_url_getImageURL("atlan-logo", 15, 15)} Atlan impact analysis
- Here is your downstream impact analysis for **${totalChangedFiles} ${
- totalChangedFiles > 1 ? "models" : "model"
- }** you have edited.
-
- ${comments}`
-}
+const ASSETS_LIMIT = 100;
-function getContractImpactAnalysisBaseComment(
- totalChangedFiles,
- comments,
- warningComments
+async function getDownstreamAssets(
+ asset,
+ guid,
+ totalModifiedFiles,
+ sendSegmentEventOfIntegration,
+ integration
) {
- return `### ${get_image_url_getImageURL("atlan-logo", 15, 15)} Atlan impact analysis
- We've detected changes in **${totalChangedFiles} ${
- totalChangedFiles > 1 ? "contracts" : "contract"
- }** that you've edited. Below is the downstream impact analysis of these changes.
-
- ${comments}
-
- ${warningComments}
- `
-}
-// EXTERNAL MODULE: ./node_modules/json-stringify-safe/stringify.js
-var json_stringify_safe_stringify = __nccwpck_require__(7073);
-;// CONCATENATED MODULE: ./adapters/api/get-contract-asset.js
-
-
-
-
-
-
-async function getContractAsset({
- dataset,
- assetQualifiedName,
-}) {
var myHeaders = {
- Authorization: `Bearer ${get_environment_variables_ATLAN_API_TOKEN}`,
- "Content-Type": "application/json",
+ authorization: `Bearer ${ATLAN_API_TOKEN}`,
+ "content-type": "application/json",
};
- var raw = json_stringify_safe_stringify(
- {
- dsl: {
- from: 0,
- size: 1,
- query: {
- bool: {
- must: [
- {
- match: {
- __state: "ACTIVE"
- }
- },
- {
- term: {
- qualifiedName: assetQualifiedName
- }
- }
- ]
- }
- }
- },
- attributes: [
- "guid",
- "name",
- "description",
- "userDescription",
- "sourceURL",
- "qualifiedName",
- "connectorName",
- "certificateStatus",
- "certificateUpdatedBy",
- "certificateUpdatedAt",
- "ownerUsers",
- "ownerGroups",
- "classificationNames",
- "meanings"
+ var raw = stringify({
+ guid: guid,
+ size: Math.max(Math.ceil(ASSETS_LIMIT / totalModifiedFiles), 1),
+ from: 0,
+ depth: 21,
+ direction: "OUTPUT",
+ entityFilters: {
+ condition: "AND",
+ criterion: [
+ {
+ attributeName: "__typeName",
+ operator: "not_contains",
+ attributeValue: "Process",
+ },
+ {
+ attributeName: "__state",
+ operator: "eq",
+ attributeValue: "ACTIVE",
+ },
],
- suppressLogs: true,
- showSearchScore: false,
- excludeClassifications: true,
- includeClassificationNames: true,
- excludeMeanings: false
- }
- );
-
- var requestOptions = {
- method: "POST",
- headers: myHeaders,
- body: raw,
- };
-
- var response = await src_fetch(
- `${get_environment_variables_ATLAN_INSTANCE_URL}/api/meta/search/indexsearch`,
- requestOptions
- )
- .then((e) => e.json())
- .catch((err) => {
- return {
- error: err,
- comment: getErrorAssetNotFound(dataset)
- }
- });
-
- if (!response?.entities?.length) {
- return {
- error: "asset not found",
- comment: getErrorAssetNotFound(dataset),
- };
- }
-
- return response.entities[0];
-}
-
-;// CONCATENATED MODULE: ./adapters/api/get-downstream-assets.js
-
-
-
-
-
-
-const ASSETS_LIMIT = 100;
-
-async function getDownstreamAssets(
- asset,
- guid,
- totalModifiedFiles,
- sendSegmentEventOfIntegration,
- integration
-) {
- var myHeaders = {
- authorization: `Bearer ${get_environment_variables_ATLAN_API_TOKEN}`,
- "content-type": "application/json",
- };
-
- var raw = json_stringify_safe_stringify({
- "guid": guid,
- "size": Math.max(Math.ceil(ASSETS_LIMIT / totalModifiedFiles), 1),
- "from": 0,
- "depth": 21,
- "direction": "OUTPUT",
- "entityFilters": {
- "condition": "AND",
- "criterion": [
- {
- "attributeName": "__state",
- "operator": "eq",
- "attributeValue": "ACTIVE"
- },
- {
- "attributeName": "__typeName",
- "operator": "neq",
- "attributeValue": "DbtProcess"
- },
- {
- "attributeName": "__typeName",
- "operator": "neq",
- "attributeValue": "DbtColumnProcess"
- },
- {
- "attributeName": "__typeName",
- "operator": "neq",
- "attributeValue": "DataEntityMappingProcess"
- },
- {
- "attributeName": "__typeName",
- "operator": "neq",
- "attributeValue": "DataAttributeMappingProcess"
- },
- {
- "attributeName": "__typeName",
- "operator": "neq",
- "attributeValue": "Process"
- },
- {
- "attributeName": "__typeName",
- "operator": "neq",
- "attributeValue": "ColumnProcess"
- },
- {
- "attributeName": "__typeName",
- "operator": "neq",
- "attributeValue": "BIProcess"
- },
- {
- "attributeName": "__typeName",
- "operator": "neq",
- "attributeValue": "FivetranProcess"
- },
- {
- "attributeName": "__typeName",
- "operator": "neq",
- "attributeValue": "FivetranColumnProcess"
- }
- ]
},
- "entityTraversalFilters": {
- "condition": "AND",
- "criterion": [
- {
- "attributeName": "__state",
- "operator": "eq",
- "attributeValue": "ACTIVE"
- }
- ]
- },
- "relationshipTraversalFilters": {
- "condition": "AND",
- "criterion": [
- {
- "attributeName": "__state",
- "operator": "eq",
- "attributeValue": "ACTIVE"
- }
- ]
- },
- "attributes": [
- "name",
- "description",
- "userDescription",
- "sourceURL",
- "qualifiedName",
- "connectorName",
- "certificateStatus",
- "certificateUpdatedBy",
- "certificateUpdatedAt",
- "ownerUsers",
- "ownerGroups",
- "classificationNames",
- "meanings"
+ attributes: [
+ "name",
+ "description",
+ "userDescription",
+ "sourceURL",
+ "qualifiedName",
+ "connectorName",
+ "certificateStatus",
+ "certificateUpdatedBy",
+ "certificateUpdatedAt",
+ "ownerUsers",
+ "ownerGroups",
+ "classificationNames",
+ "meanings",
],
- "excludeMeanings": false,
- "excludeClassifications": false
+ excludeMeanings: false,
+ excludeClassifications: false,
});
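// A minimal sketch (not part of this patch) of the lineage request body assembled
// above: the rewrite collapses the old per-type "neq" filters (DbtProcess,
// ColumnProcess, BIProcess, ...) into a single not_contains filter on __typeName.
// buildLineageRequestBody is an illustrative name, not an export of this bundle.
function buildLineageRequestBody(guid, totalModifiedFiles, assetsLimit = 100) {
  return {
    guid,
    // spread the overall asset budget across the modified files, never below 1
    size: Math.max(Math.ceil(assetsLimit / totalModifiedFiles), 1),
    from: 0,
    depth: 21,
    direction: "OUTPUT",
    entityFilters: {
      condition: "AND",
      criterion: [
        // exclude every *Process lineage node in one pass instead of enumerating each type
        { attributeName: "__typeName", operator: "not_contains", attributeValue: "Process" },
        // only active (non-archived) assets
        { attributeName: "__state", operator: "eq", attributeValue: "ACTIVE" },
      ],
    },
  };
}
// Example: buildLineageRequestBody("some-guid", 4) requests at most 25 downstream assets.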
var requestOptions = {
@@ -25305,26 +25024,25 @@ async function getDownstreamAssets(
};
var handleError = (err) => {
- const comment = `
- ### ${get_image_url_getConnectorImage(asset.attributes.connectorName
- )} [${asset.displayText}](${get_environment_variables_ATLAN_INSTANCE_URL}/assets/${
- asset.guid
- }/overview?utm_source=dbt_${integration}_action) ${
- asset.attributes?.certificateStatus
- ? get_image_url_getCertificationImage(asset.attributes.certificateStatus)
- : ""
- }
-
- _Failed to fetch impacted assets._
-
- ${get_image_url_getImageURL(
- "atlan-logo",
- 15,
- 15
- )} [View lineage in Atlan](${get_environment_variables_ATLAN_INSTANCE_URL}/assets/${
- asset.guid
- }/lineage/overview?utm_source=dbt_${integration}_action)
- `;
+ const comment = `### ${getConnectorImage(
+ asset.attributes.connectorName
+ )} [${asset.displayText}](${ATLAN_INSTANCE_URL}/assets/${
+ asset.guid
+ }/overview?utm_source=dbt_${integration}_action) ${
+ asset.attributes?.certificateStatus
+ ? getCertificationImage(asset.attributes.certificateStatus)
+ : ""
+ }
+
+_Failed to fetch impacted assets._
+
+${getImageURL(
+ "atlan-logo",
+ 15,
+ 15
+)} [View lineage in Atlan](${ATLAN_INSTANCE_URL}/assets/${
+ asset.guid
+ }/lineage/overview?utm_source=dbt_${integration}_action)`;
sendSegmentEventOfIntegration({
action: "dbt_ci_action_failure",
@@ -25341,7 +25059,7 @@ async function getDownstreamAssets(
};
var response = await src_fetch(
- `${get_environment_variables_ATLAN_INSTANCE_URL}/api/meta/lineage/list`,
+ `${ATLAN_INSTANCE_URL}/api/meta/lineage/list`,
requestOptions
)
.then((e) => {
@@ -25357,253 +25075,43 @@ async function getDownstreamAssets(
};
});
if (response.error) return response;
-
- const modifiedEntities = response.entities.filter(item => item.guid !== guid)
-
- return {...response, entities: modifiedEntities}
-}
-
-function contructCommentForDownstreamLineageFetchError({
- asset,
- utmSource
-}){
- const comment = `
- ### ${getConnectorImage(asset.attributes.connectorName
- )} [${asset.displayText}](${ATLAN_INSTANCE_URL}/assets/${
- asset.guid
- }/overview?utm_source=${utmSource}) ${
- asset.attributes?.certificateStatus
- ? getCertificationImage(asset.attributes.certificateStatus)
- : ""
- }
-
- _Failed to fetch impacted assets._
-
- ${getImageURL(
- "atlan-logo",
- 15,
- 15
- )} [View lineage in Atlan](${ATLAN_INSTANCE_URL}/assets/${
- asset.guid
- }/lineage/overview?utm_source=${utmSource})
- `;
- return comment;
+ return response;
}
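// A minimal sketch (not part of this patch) of how a caller might consume
// getDownstreamAssets above, assuming the { error, comment } shape produced by
// handleError and the entities array returned by /api/meta/lineage/list.
// postComment is a stand-in stub, not an API from this bundle.
async function reportDownstreamImpact(asset, guid, totalModifiedFiles, sendEvent) {
  const postComment = async (body) => console.log(body); // stub for illustration
  const response = await getDownstreamAssets(
    asset,
    guid,
    totalModifiedFiles,
    sendEvent,
    "github"
  );
  if (response.error) {
    // the prepared markdown comment explains the lineage-fetch failure
    await postComment(response.comment);
    return [];
  }
  return response.entities ?? [];
}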
-async function getDownstreamLineageForAssets({
- asset,
- guid,
- totalModifiedFiles,
- utmSource
-}) {
- var myHeaders = {
- authorization: `Bearer ${ATLAN_API_TOKEN}`,
- "content-type": "application/json",
- };
-
- var raw = stringify({
- "guid": guid,
- "size": Math.max(Math.ceil(ASSETS_LIMIT / totalModifiedFiles), 1),
- "from": 0,
- "depth": 21,
- "direction": "OUTPUT",
- "entityFilters": {
- "condition": "AND",
- "criterion": [
- {
- "attributeName": "__state",
- "operator": "eq",
- "attributeValue": "ACTIVE"
- },
- {
- "attributeName": "__typeName",
- "operator": "neq",
- "attributeValue": "DbtProcess"
- },
- {
- "attributeName": "__typeName",
- "operator": "neq",
- "attributeValue": "DbtColumnProcess"
- },
- {
- "attributeName": "__typeName",
- "operator": "neq",
- "attributeValue": "DataEntityMappingProcess"
- },
- {
- "attributeName": "__typeName",
- "operator": "neq",
- "attributeValue": "DataAttributeMappingProcess"
- },
- {
- "attributeName": "__typeName",
- "operator": "neq",
- "attributeValue": "Process"
- },
- {
- "attributeName": "__typeName",
- "operator": "neq",
- "attributeValue": "ColumnProcess"
- },
- {
- "attributeName": "__typeName",
- "operator": "neq",
- "attributeValue": "BIProcess"
- },
- {
- "attributeName": "__typeName",
- "operator": "neq",
- "attributeValue": "FivetranProcess"
- },
- {
- "attributeName": "__typeName",
- "operator": "neq",
- "attributeValue": "FivetranColumnProcess"
- }
- ]
- },
- "entityTraversalFilters": {
- "condition": "AND",
- "criterion": [
- {
- "attributeName": "__state",
- "operator": "eq",
- "attributeValue": "ACTIVE"
- }
- ]
- },
- "relationshipTraversalFilters": {
- "condition": "AND",
- "criterion": [
- {
- "attributeName": "__state",
- "operator": "eq",
- "attributeValue": "ACTIVE"
- }
- ]
- },
- "attributes": [
- "name",
- "description",
- "userDescription",
- "sourceURL",
- "qualifiedName",
- "connectorName",
- "certificateStatus",
- "certificateUpdatedBy",
- "certificateUpdatedAt",
- "ownerUsers",
- "ownerGroups",
- "classificationNames",
- "meanings"
- ],
- "excludeMeanings": false,
- "excludeClassifications": false
- });
-
- var requestOptions = {
- method: "POST",
- headers: myHeaders,
- body: raw,
- };
-
- var response = await fetch(
- `${ATLAN_INSTANCE_URL}/api/meta/lineage/list`,
- requestOptions
- )
- .then((e) => {
- if (e.status === 200) {
- return e.json();
- } else {
- throw e;
- }
- })
- .catch((err) => {
- return {
- error: err,
- comment: contructCommentForDownstreamLineageFetchError({asset, utmSource}),
- };
- });
- if (response.error) return {
- error: err,
- comment: contructCommentForDownstreamLineageFetchError({asset, utmSource}),
- };
-
- const modifiedEntities = response.entities.filter(item => item.guid !== guid)
-
- return {...response, entities: modifiedEntities}
-}
+;// CONCATENATED MODULE: ./adapters/templates/atlan.js
-// EXTERNAL MODULE: ./node_modules/@actions/github/lib/github.js
-var github = __nccwpck_require__(5438);
-;// CONCATENATED MODULE: ./adapters/logger/logger.js
-// logger.js
-function getCurrentTimestamp() {
- const now = new Date();
- return now.toISOString();
+function getErrorModelNotFound(name) {
+ return `
+
❌ Model with name **${name}** could not be found or is deleted
+ `;
}
-function logInfo(message, method) {
- const timestamp = getCurrentTimestamp();
- const logEntry = {
- level: "ERROR",
- timestamp,
- method,
- message,
- };
- console.error(logEntry);
-}
+function getErrorDoesNotMaterialize(
+ name,
+ ATLAN_INSTANCE_URL,
+ response,
+ integration
+) {
-function withInfo(message, vcs, sha, method) {
- const timestamp = getCurrentTimestamp();
- const logEntry = {
- level: "INFO",
- timestamp,
- vcs,
- sha,
- method,
- message,
- };
- console.log(logEntry);
+ return `
+
❌ Model with name [${name}](${ATLAN_INSTANCE_URL}/assets/${response.entities[0].guid}/overview?utm_source=dbt_${integration}_action) does not materialise any asset
`;
}
-function withError(message, vcs, sha, method) {
- const timestamp = getCurrentTimestamp();
- const logEntry = {
- level: "ERROR",
- timestamp,
- vcs,
- sha,
- method,
- message,
- };
- console.error(logEntry);
+function getNewModelAddedComment(fileName) {
+ return `### ${getConnectorImage("dbt")} ${fileName} 🆕
+  It's a new model that isn't present in Atlan yet; you'll see its downstream impact once it's present in Atlan.`
}
-function debug(message, vcs, sha, method) {
- const timestamp = getCurrentTimestamp();
- const logEntry = {
- level: "DEBUG",
- timestamp,
- vcs,
- sha,
- method,
- message,
- };
- console.debug(logEntry);
+function getBaseComment(totalChangedFiles, comments) {
+ return `### ${getImageURL("atlan-logo", 15, 15)} Atlan impact analysis
+ Here is your downstream impact analysis for **${totalChangedFiles} ${
+ totalChangedFiles > 1 ? "models" : "model"
+ }** you have edited.
+
+ ${comments}`
}
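// A minimal sketch (not part of this patch) of how the template helpers above
// compose into a single pull-request comment, assuming getConnectorImage and
// getBaseComment behave as defined in this bundle; renderImpactComment and the
// changedModels shape are illustrative only.
function renderImpactComment(changedModels) {
  const perModelComments = changedModels
    .map((model) => `### ${getConnectorImage("dbt")} ${model.fileName}`)
    .join("\n\n");
  return getBaseComment(changedModels.length, perModelComments);
}
// Example: renderImpactComment([{ fileName: "orders.sql" }]) yields one
// "Atlan impact analysis" comment covering a single model.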
-
-const logger = {
- withInfo,
- withError,
- debug,
- logInfo,
-};
-
-/* harmony default export */ const logger_logger = (logger);
-
;// CONCATENATED MODULE: ./adapters/api/get-asset.js
@@ -25617,11 +25125,11 @@ async function getAsset({
integration,
}) {
var myHeaders = {
- Authorization: `Bearer ${get_environment_variables_ATLAN_API_TOKEN}`,
+ Authorization: `Bearer ${ATLAN_API_TOKEN}`,
"Content-Type": "application/json",
};
- var raw = json_stringify_safe_stringify({
+ var raw = stringify({
dsl: {
from: 0,
size: 21,
@@ -25689,7 +25197,7 @@ async function getAsset({
};
var response = await src_fetch(
- `${get_environment_variables_ATLAN_INSTANCE_URL}/api/meta/search/indexsearch#findAssetByExactName`,
+ `${ATLAN_INSTANCE_URL}/api/meta/search/indexsearch#findAssetByExactName`,
requestOptions
)
.then((e) => e.json())
@@ -25740,7 +25248,7 @@ async function getAsset({
return {
error: getErrorDoesNotMaterialize(
name,
- get_environment_variables_ATLAN_INSTANCE_URL,
+ ATLAN_INSTANCE_URL,
response,
integration
),
@@ -25753,12 +25261,11 @@ async function getAsset({
-
async function getClassifications({
sendSegmentEventOfIntegration,
}) {
var myHeaders = {
- Authorization: `Bearer ${get_environment_variables_ATLAN_API_TOKEN}`,
+ Authorization: `Bearer ${ATLAN_API_TOKEN}`,
"Content-Type": "application/json",
};
@@ -25769,7 +25276,7 @@ async function getClassifications({
};
var response = await src_fetch(
- `${get_environment_variables_ATLAN_INSTANCE_URL}/api/meta/types/typedefs?type=classification`,
+ `${ATLAN_INSTANCE_URL}/api/meta/types/typedefs?type=classification`,
requestOptions
)
.then((e) => e.json())
@@ -25785,6 +25292,7 @@ async function getClassifications({
return response?.classificationDefs;
}
+
// EXTERNAL MODULE: ./node_modules/uuid/dist/index.js
var uuid_dist = __nccwpck_require__(5840);
;// CONCATENATED MODULE: ./node_modules/uuid/wrapper.mjs
@@ -25812,11 +25320,11 @@ async function createResource(
sendSegmentEventOfIntegration
) {
var myHeaders = {
- Authorization: `Bearer ${get_environment_variables_ATLAN_API_TOKEN}`,
+ Authorization: `Bearer ${ATLAN_API_TOKEN}`,
"Content-Type": "application/json",
};
- var raw = json_stringify_safe_stringify({
+ var raw = stringify({
entities: [
{
typeName: "Link",
@@ -25842,7 +25350,7 @@ async function createResource(
};
var response = await src_fetch(
- `${get_environment_variables_ATLAN_INSTANCE_URL}/api/meta/entity/bulk`,
+ `${ATLAN_INSTANCE_URL}/api/meta/entity/bulk`,
requestOptions
)
.then((e) => e.json())
@@ -25868,7 +25376,7 @@ async function createResource(
async function sendSegmentEvent(action, body) {
const myHeaders = {
- authorization: `Bearer ${get_environment_variables_ATLAN_API_TOKEN}`,
+ authorization: `Bearer ${ATLAN_API_TOKEN}`,
"content-type": "application/json",
};
@@ -25882,7 +25390,7 @@ async function sendSegmentEvent(action, body) {
if (!IS_DEV) {
response = await src_fetch(
- `${get_environment_variables_ATLAN_INSTANCE_URL}/api/service/segment/track`,
+ `${ATLAN_INSTANCE_URL}/api/service/segment/track`,
requestOptions
)
.then((resp) => {
@@ -25896,4727 +25404,121 @@ async function sendSegmentEvent(action, body) {
}
return response;
-}
-
-;// CONCATENATED MODULE: ./adapters/api/index.js
-
-
-
-
-
-
-
-;// CONCATENATED MODULE: ./node_modules/js-yaml/dist/js-yaml.mjs
-
-/*! js-yaml 4.1.0 https://github.com/nodeca/js-yaml @license MIT */
-function isNothing(subject) {
- return (typeof subject === 'undefined') || (subject === null);
-}
-
-
-function isObject(subject) {
- return (typeof subject === 'object') && (subject !== null);
-}
-
-
-function toArray(sequence) {
- if (Array.isArray(sequence)) return sequence;
- else if (isNothing(sequence)) return [];
-
- return [ sequence ];
-}
-
-
-function extend(target, source) {
- var index, length, key, sourceKeys;
-
- if (source) {
- sourceKeys = Object.keys(source);
-
- for (index = 0, length = sourceKeys.length; index < length; index += 1) {
- key = sourceKeys[index];
- target[key] = source[key];
- }
- }
-
- return target;
-}
-
-
-function repeat(string, count) {
- var result = '', cycle;
-
- for (cycle = 0; cycle < count; cycle += 1) {
- result += string;
- }
-
- return result;
-}
-
-
-function isNegativeZero(number) {
- return (number === 0) && (Number.NEGATIVE_INFINITY === 1 / number);
-}
-
-
-var isNothing_1 = isNothing;
-var isObject_1 = isObject;
-var toArray_1 = toArray;
-var repeat_1 = repeat;
-var isNegativeZero_1 = isNegativeZero;
-var extend_1 = extend;
-
-var common = {
- isNothing: isNothing_1,
- isObject: isObject_1,
- toArray: toArray_1,
- repeat: repeat_1,
- isNegativeZero: isNegativeZero_1,
- extend: extend_1
-};
-
-// YAML error class. http://stackoverflow.com/questions/8458984
-
-
-function formatError(exception, compact) {
- var where = '', message = exception.reason || '(unknown reason)';
-
- if (!exception.mark) return message;
-
- if (exception.mark.name) {
- where += 'in "' + exception.mark.name + '" ';
- }
-
- where += '(' + (exception.mark.line + 1) + ':' + (exception.mark.column + 1) + ')';
-
- if (!compact && exception.mark.snippet) {
- where += '\n\n' + exception.mark.snippet;
- }
-
- return message + ' ' + where;
-}
-
-
-function YAMLException$1(reason, mark) {
- // Super constructor
- Error.call(this);
-
- this.name = 'YAMLException';
- this.reason = reason;
- this.mark = mark;
- this.message = formatError(this, false);
-
- // Include stack trace in error object
- if (Error.captureStackTrace) {
- // Chrome and NodeJS
- Error.captureStackTrace(this, this.constructor);
- } else {
- // FF, IE 10+ and Safari 6+. Fallback for others
- this.stack = (new Error()).stack || '';
- }
-}
-
-
-// Inherit from Error
-YAMLException$1.prototype = Object.create(Error.prototype);
-YAMLException$1.prototype.constructor = YAMLException$1;
-
-
-YAMLException$1.prototype.toString = function toString(compact) {
- return this.name + ': ' + formatError(this, compact);
-};
-
-
-var exception = YAMLException$1;
-
-// get snippet for a single line, respecting maxLength
-function getLine(buffer, lineStart, lineEnd, position, maxLineLength) {
- var head = '';
- var tail = '';
- var maxHalfLength = Math.floor(maxLineLength / 2) - 1;
-
- if (position - lineStart > maxHalfLength) {
- head = ' ... ';
- lineStart = position - maxHalfLength + head.length;
- }
-
- if (lineEnd - position > maxHalfLength) {
- tail = ' ...';
- lineEnd = position + maxHalfLength - tail.length;
- }
-
- return {
- str: head + buffer.slice(lineStart, lineEnd).replace(/\t/g, '→') + tail,
- pos: position - lineStart + head.length // relative position
- };
-}
-
-
-function padStart(string, max) {
- return common.repeat(' ', max - string.length) + string;
-}
-
-
-function makeSnippet(mark, options) {
- options = Object.create(options || null);
-
- if (!mark.buffer) return null;
-
- if (!options.maxLength) options.maxLength = 79;
- if (typeof options.indent !== 'number') options.indent = 1;
- if (typeof options.linesBefore !== 'number') options.linesBefore = 3;
- if (typeof options.linesAfter !== 'number') options.linesAfter = 2;
-
- var re = /\r?\n|\r|\0/g;
- var lineStarts = [ 0 ];
- var lineEnds = [];
- var match;
- var foundLineNo = -1;
-
- while ((match = re.exec(mark.buffer))) {
- lineEnds.push(match.index);
- lineStarts.push(match.index + match[0].length);
-
- if (mark.position <= match.index && foundLineNo < 0) {
- foundLineNo = lineStarts.length - 2;
- }
- }
-
- if (foundLineNo < 0) foundLineNo = lineStarts.length - 1;
-
- var result = '', i, line;
- var lineNoLength = Math.min(mark.line + options.linesAfter, lineEnds.length).toString().length;
- var maxLineLength = options.maxLength - (options.indent + lineNoLength + 3);
-
- for (i = 1; i <= options.linesBefore; i++) {
- if (foundLineNo - i < 0) break;
- line = getLine(
- mark.buffer,
- lineStarts[foundLineNo - i],
- lineEnds[foundLineNo - i],
- mark.position - (lineStarts[foundLineNo] - lineStarts[foundLineNo - i]),
- maxLineLength
- );
- result = common.repeat(' ', options.indent) + padStart((mark.line - i + 1).toString(), lineNoLength) +
- ' | ' + line.str + '\n' + result;
- }
-
- line = getLine(mark.buffer, lineStarts[foundLineNo], lineEnds[foundLineNo], mark.position, maxLineLength);
- result += common.repeat(' ', options.indent) + padStart((mark.line + 1).toString(), lineNoLength) +
- ' | ' + line.str + '\n';
- result += common.repeat('-', options.indent + lineNoLength + 3 + line.pos) + '^' + '\n';
-
- for (i = 1; i <= options.linesAfter; i++) {
- if (foundLineNo + i >= lineEnds.length) break;
- line = getLine(
- mark.buffer,
- lineStarts[foundLineNo + i],
- lineEnds[foundLineNo + i],
- mark.position - (lineStarts[foundLineNo] - lineStarts[foundLineNo + i]),
- maxLineLength
- );
- result += common.repeat(' ', options.indent) + padStart((mark.line + i + 1).toString(), lineNoLength) +
- ' | ' + line.str + '\n';
- }
-
- return result.replace(/\n$/, '');
-}
-
-
-var snippet = makeSnippet;
-
-var TYPE_CONSTRUCTOR_OPTIONS = [
- 'kind',
- 'multi',
- 'resolve',
- 'construct',
- 'instanceOf',
- 'predicate',
- 'represent',
- 'representName',
- 'defaultStyle',
- 'styleAliases'
-];
-
-var YAML_NODE_KINDS = [
- 'scalar',
- 'sequence',
- 'mapping'
-];
-
-function compileStyleAliases(map) {
- var result = {};
-
- if (map !== null) {
- Object.keys(map).forEach(function (style) {
- map[style].forEach(function (alias) {
- result[String(alias)] = style;
- });
- });
- }
-
- return result;
-}
-
-function Type$1(tag, options) {
- options = options || {};
-
- Object.keys(options).forEach(function (name) {
- if (TYPE_CONSTRUCTOR_OPTIONS.indexOf(name) === -1) {
- throw new exception('Unknown option "' + name + '" is met in definition of "' + tag + '" YAML type.');
- }
- });
-
- // TODO: Add tag format check.
- this.options = options; // keep original options in case user wants to extend this type later
- this.tag = tag;
- this.kind = options['kind'] || null;
- this.resolve = options['resolve'] || function () { return true; };
- this.construct = options['construct'] || function (data) { return data; };
- this.instanceOf = options['instanceOf'] || null;
- this.predicate = options['predicate'] || null;
- this.represent = options['represent'] || null;
- this.representName = options['representName'] || null;
- this.defaultStyle = options['defaultStyle'] || null;
- this.multi = options['multi'] || false;
- this.styleAliases = compileStyleAliases(options['styleAliases'] || null);
-
- if (YAML_NODE_KINDS.indexOf(this.kind) === -1) {
- throw new exception('Unknown kind "' + this.kind + '" is specified for "' + tag + '" YAML type.');
- }
-}
-
-var type = Type$1;
-
-/*eslint-disable max-len*/
-
-
-
-
-
-function compileList(schema, name) {
- var result = [];
-
- schema[name].forEach(function (currentType) {
- var newIndex = result.length;
-
- result.forEach(function (previousType, previousIndex) {
- if (previousType.tag === currentType.tag &&
- previousType.kind === currentType.kind &&
- previousType.multi === currentType.multi) {
-
- newIndex = previousIndex;
- }
- });
-
- result[newIndex] = currentType;
- });
-
- return result;
-}
-
-
-function compileMap(/* lists... */) {
- var result = {
- scalar: {},
- sequence: {},
- mapping: {},
- fallback: {},
- multi: {
- scalar: [],
- sequence: [],
- mapping: [],
- fallback: []
- }
- }, index, length;
-
- function collectType(type) {
- if (type.multi) {
- result.multi[type.kind].push(type);
- result.multi['fallback'].push(type);
- } else {
- result[type.kind][type.tag] = result['fallback'][type.tag] = type;
- }
- }
-
- for (index = 0, length = arguments.length; index < length; index += 1) {
- arguments[index].forEach(collectType);
- }
- return result;
-}
-
-
-function Schema$1(definition) {
- return this.extend(definition);
-}
-
-
-Schema$1.prototype.extend = function extend(definition) {
- var implicit = [];
- var explicit = [];
-
- if (definition instanceof type) {
- // Schema.extend(type)
- explicit.push(definition);
-
- } else if (Array.isArray(definition)) {
- // Schema.extend([ type1, type2, ... ])
- explicit = explicit.concat(definition);
-
- } else if (definition && (Array.isArray(definition.implicit) || Array.isArray(definition.explicit))) {
- // Schema.extend({ explicit: [ type1, type2, ... ], implicit: [ type1, type2, ... ] })
- if (definition.implicit) implicit = implicit.concat(definition.implicit);
- if (definition.explicit) explicit = explicit.concat(definition.explicit);
-
- } else {
- throw new exception('Schema.extend argument should be a Type, [ Type ], ' +
- 'or a schema definition ({ implicit: [...], explicit: [...] })');
- }
-
- implicit.forEach(function (type$1) {
- if (!(type$1 instanceof type)) {
- throw new exception('Specified list of YAML types (or a single Type object) contains a non-Type object.');
- }
-
- if (type$1.loadKind && type$1.loadKind !== 'scalar') {
- throw new exception('There is a non-scalar type in the implicit list of a schema. Implicit resolving of such types is not supported.');
- }
-
- if (type$1.multi) {
- throw new exception('There is a multi type in the implicit list of a schema. Multi tags can only be listed as explicit.');
- }
- });
-
- explicit.forEach(function (type$1) {
- if (!(type$1 instanceof type)) {
- throw new exception('Specified list of YAML types (or a single Type object) contains a non-Type object.');
- }
- });
-
- var result = Object.create(Schema$1.prototype);
-
- result.implicit = (this.implicit || []).concat(implicit);
- result.explicit = (this.explicit || []).concat(explicit);
-
- result.compiledImplicit = compileList(result, 'implicit');
- result.compiledExplicit = compileList(result, 'explicit');
- result.compiledTypeMap = compileMap(result.compiledImplicit, result.compiledExplicit);
-
- return result;
-};
-
-
-var schema = Schema$1;
-
-var str = new type('tag:yaml.org,2002:str', {
- kind: 'scalar',
- construct: function (data) { return data !== null ? data : ''; }
-});
-
-var seq = new type('tag:yaml.org,2002:seq', {
- kind: 'sequence',
- construct: function (data) { return data !== null ? data : []; }
-});
-
-var map = new type('tag:yaml.org,2002:map', {
- kind: 'mapping',
- construct: function (data) { return data !== null ? data : {}; }
-});
-
-var failsafe = new schema({
- explicit: [
- str,
- seq,
- map
- ]
-});
-
-function resolveYamlNull(data) {
- if (data === null) return true;
-
- var max = data.length;
-
- return (max === 1 && data === '~') ||
- (max === 4 && (data === 'null' || data === 'Null' || data === 'NULL'));
-}
-
-function constructYamlNull() {
- return null;
-}
-
-function isNull(object) {
- return object === null;
-}
-
-var _null = new type('tag:yaml.org,2002:null', {
- kind: 'scalar',
- resolve: resolveYamlNull,
- construct: constructYamlNull,
- predicate: isNull,
- represent: {
- canonical: function () { return '~'; },
- lowercase: function () { return 'null'; },
- uppercase: function () { return 'NULL'; },
- camelcase: function () { return 'Null'; },
- empty: function () { return ''; }
- },
- defaultStyle: 'lowercase'
-});
-
-function resolveYamlBoolean(data) {
- if (data === null) return false;
-
- var max = data.length;
-
- return (max === 4 && (data === 'true' || data === 'True' || data === 'TRUE')) ||
- (max === 5 && (data === 'false' || data === 'False' || data === 'FALSE'));
-}
-
-function constructYamlBoolean(data) {
- return data === 'true' ||
- data === 'True' ||
- data === 'TRUE';
-}
-
-function isBoolean(object) {
- return Object.prototype.toString.call(object) === '[object Boolean]';
-}
-
-var bool = new type('tag:yaml.org,2002:bool', {
- kind: 'scalar',
- resolve: resolveYamlBoolean,
- construct: constructYamlBoolean,
- predicate: isBoolean,
- represent: {
- lowercase: function (object) { return object ? 'true' : 'false'; },
- uppercase: function (object) { return object ? 'TRUE' : 'FALSE'; },
- camelcase: function (object) { return object ? 'True' : 'False'; }
- },
- defaultStyle: 'lowercase'
-});
-
-function isHexCode(c) {
- return ((0x30/* 0 */ <= c) && (c <= 0x39/* 9 */)) ||
- ((0x41/* A */ <= c) && (c <= 0x46/* F */)) ||
- ((0x61/* a */ <= c) && (c <= 0x66/* f */));
-}
-
-function isOctCode(c) {
- return ((0x30/* 0 */ <= c) && (c <= 0x37/* 7 */));
-}
-
-function isDecCode(c) {
- return ((0x30/* 0 */ <= c) && (c <= 0x39/* 9 */));
-}
-
-function resolveYamlInteger(data) {
- if (data === null) return false;
-
- var max = data.length,
- index = 0,
- hasDigits = false,
- ch;
-
- if (!max) return false;
-
- ch = data[index];
-
- // sign
- if (ch === '-' || ch === '+') {
- ch = data[++index];
- }
-
- if (ch === '0') {
- // 0
- if (index + 1 === max) return true;
- ch = data[++index];
-
- // base 2, base 8, base 16
-
- if (ch === 'b') {
- // base 2
- index++;
-
- for (; index < max; index++) {
- ch = data[index];
- if (ch === '_') continue;
- if (ch !== '0' && ch !== '1') return false;
- hasDigits = true;
- }
- return hasDigits && ch !== '_';
- }
-
-
- if (ch === 'x') {
- // base 16
- index++;
-
- for (; index < max; index++) {
- ch = data[index];
- if (ch === '_') continue;
- if (!isHexCode(data.charCodeAt(index))) return false;
- hasDigits = true;
- }
- return hasDigits && ch !== '_';
- }
-
-
- if (ch === 'o') {
- // base 8
- index++;
-
- for (; index < max; index++) {
- ch = data[index];
- if (ch === '_') continue;
- if (!isOctCode(data.charCodeAt(index))) return false;
- hasDigits = true;
- }
- return hasDigits && ch !== '_';
- }
- }
-
- // base 10 (except 0)
-
- // value should not start with `_`;
- if (ch === '_') return false;
-
- for (; index < max; index++) {
- ch = data[index];
- if (ch === '_') continue;
- if (!isDecCode(data.charCodeAt(index))) {
- return false;
- }
- hasDigits = true;
- }
-
- // Should have digits and should not end with `_`
- if (!hasDigits || ch === '_') return false;
-
- return true;
-}
-
-function constructYamlInteger(data) {
- var value = data, sign = 1, ch;
-
- if (value.indexOf('_') !== -1) {
- value = value.replace(/_/g, '');
- }
-
- ch = value[0];
-
- if (ch === '-' || ch === '+') {
- if (ch === '-') sign = -1;
- value = value.slice(1);
- ch = value[0];
- }
-
- if (value === '0') return 0;
-
- if (ch === '0') {
- if (value[1] === 'b') return sign * parseInt(value.slice(2), 2);
- if (value[1] === 'x') return sign * parseInt(value.slice(2), 16);
- if (value[1] === 'o') return sign * parseInt(value.slice(2), 8);
- }
-
- return sign * parseInt(value, 10);
-}
-
-function isInteger(object) {
- return (Object.prototype.toString.call(object)) === '[object Number]' &&
- (object % 1 === 0 && !common.isNegativeZero(object));
-}
-
-var js_yaml_int = new type('tag:yaml.org,2002:int', {
- kind: 'scalar',
- resolve: resolveYamlInteger,
- construct: constructYamlInteger,
- predicate: isInteger,
- represent: {
- binary: function (obj) { return obj >= 0 ? '0b' + obj.toString(2) : '-0b' + obj.toString(2).slice(1); },
- octal: function (obj) { return obj >= 0 ? '0o' + obj.toString(8) : '-0o' + obj.toString(8).slice(1); },
- decimal: function (obj) { return obj.toString(10); },
- /* eslint-disable max-len */
- hexadecimal: function (obj) { return obj >= 0 ? '0x' + obj.toString(16).toUpperCase() : '-0x' + obj.toString(16).toUpperCase().slice(1); }
- },
- defaultStyle: 'decimal',
- styleAliases: {
- binary: [ 2, 'bin' ],
- octal: [ 8, 'oct' ],
- decimal: [ 10, 'dec' ],
- hexadecimal: [ 16, 'hex' ]
- }
-});
-
-var YAML_FLOAT_PATTERN = new RegExp(
- // 2.5e4, 2.5 and integers
- '^(?:[-+]?(?:[0-9][0-9_]*)(?:\\.[0-9_]*)?(?:[eE][-+]?[0-9]+)?' +
- // .2e4, .2
- // special case, seems not from spec
- '|\\.[0-9_]+(?:[eE][-+]?[0-9]+)?' +
- // .inf
- '|[-+]?\\.(?:inf|Inf|INF)' +
- // .nan
- '|\\.(?:nan|NaN|NAN))$');
-
-function resolveYamlFloat(data) {
- if (data === null) return false;
-
- if (!YAML_FLOAT_PATTERN.test(data) ||
- // Quick hack to not allow integers end with `_`
- // Probably should update regexp & check speed
- data[data.length - 1] === '_') {
- return false;
- }
-
- return true;
-}
-
-function constructYamlFloat(data) {
- var value, sign;
-
- value = data.replace(/_/g, '').toLowerCase();
- sign = value[0] === '-' ? -1 : 1;
-
- if ('+-'.indexOf(value[0]) >= 0) {
- value = value.slice(1);
- }
-
- if (value === '.inf') {
- return (sign === 1) ? Number.POSITIVE_INFINITY : Number.NEGATIVE_INFINITY;
-
- } else if (value === '.nan') {
- return NaN;
- }
- return sign * parseFloat(value, 10);
-}
-
-
-var SCIENTIFIC_WITHOUT_DOT = /^[-+]?[0-9]+e/;
-
-function representYamlFloat(object, style) {
- var res;
-
- if (isNaN(object)) {
- switch (style) {
- case 'lowercase': return '.nan';
- case 'uppercase': return '.NAN';
- case 'camelcase': return '.NaN';
- }
- } else if (Number.POSITIVE_INFINITY === object) {
- switch (style) {
- case 'lowercase': return '.inf';
- case 'uppercase': return '.INF';
- case 'camelcase': return '.Inf';
- }
- } else if (Number.NEGATIVE_INFINITY === object) {
- switch (style) {
- case 'lowercase': return '-.inf';
- case 'uppercase': return '-.INF';
- case 'camelcase': return '-.Inf';
- }
- } else if (common.isNegativeZero(object)) {
- return '-0.0';
- }
-
- res = object.toString(10);
-
- // JS stringifier can build scientific format without dots: 5e-100,
- // while YAML requres dot: 5.e-100. Fix it with simple hack
-
- return SCIENTIFIC_WITHOUT_DOT.test(res) ? res.replace('e', '.e') : res;
-}
-
-function isFloat(object) {
- return (Object.prototype.toString.call(object) === '[object Number]') &&
- (object % 1 !== 0 || common.isNegativeZero(object));
-}
-
-var js_yaml_float = new type('tag:yaml.org,2002:float', {
- kind: 'scalar',
- resolve: resolveYamlFloat,
- construct: constructYamlFloat,
- predicate: isFloat,
- represent: representYamlFloat,
- defaultStyle: 'lowercase'
-});
-
-var json = failsafe.extend({
- implicit: [
- _null,
- bool,
- js_yaml_int,
- js_yaml_float
- ]
-});
-
-var js_yaml_core = json;
-
-var YAML_DATE_REGEXP = new RegExp(
- '^([0-9][0-9][0-9][0-9])' + // [1] year
- '-([0-9][0-9])' + // [2] month
- '-([0-9][0-9])$'); // [3] day
-
-var YAML_TIMESTAMP_REGEXP = new RegExp(
- '^([0-9][0-9][0-9][0-9])' + // [1] year
- '-([0-9][0-9]?)' + // [2] month
- '-([0-9][0-9]?)' + // [3] day
- '(?:[Tt]|[ \\t]+)' + // ...
- '([0-9][0-9]?)' + // [4] hour
- ':([0-9][0-9])' + // [5] minute
- ':([0-9][0-9])' + // [6] second
- '(?:\\.([0-9]*))?' + // [7] fraction
- '(?:[ \\t]*(Z|([-+])([0-9][0-9]?)' + // [8] tz [9] tz_sign [10] tz_hour
- '(?::([0-9][0-9]))?))?$'); // [11] tz_minute
-
-function resolveYamlTimestamp(data) {
- if (data === null) return false;
- if (YAML_DATE_REGEXP.exec(data) !== null) return true;
- if (YAML_TIMESTAMP_REGEXP.exec(data) !== null) return true;
- return false;
-}
-
-function constructYamlTimestamp(data) {
- var match, year, month, day, hour, minute, second, fraction = 0,
- delta = null, tz_hour, tz_minute, date;
-
- match = YAML_DATE_REGEXP.exec(data);
- if (match === null) match = YAML_TIMESTAMP_REGEXP.exec(data);
-
- if (match === null) throw new Error('Date resolve error');
-
- // match: [1] year [2] month [3] day
-
- year = +(match[1]);
- month = +(match[2]) - 1; // JS month starts with 0
- day = +(match[3]);
-
- if (!match[4]) { // no hour
- return new Date(Date.UTC(year, month, day));
- }
-
- // match: [4] hour [5] minute [6] second [7] fraction
-
- hour = +(match[4]);
- minute = +(match[5]);
- second = +(match[6]);
-
- if (match[7]) {
- fraction = match[7].slice(0, 3);
- while (fraction.length < 3) { // milli-seconds
- fraction += '0';
- }
- fraction = +fraction;
- }
-
- // match: [8] tz [9] tz_sign [10] tz_hour [11] tz_minute
-
- if (match[9]) {
- tz_hour = +(match[10]);
- tz_minute = +(match[11] || 0);
- delta = (tz_hour * 60 + tz_minute) * 60000; // delta in mili-seconds
- if (match[9] === '-') delta = -delta;
- }
-
- date = new Date(Date.UTC(year, month, day, hour, minute, second, fraction));
-
- if (delta) date.setTime(date.getTime() - delta);
-
- return date;
-}
-
-function representYamlTimestamp(object /*, style*/) {
- return object.toISOString();
-}
-
-var timestamp = new type('tag:yaml.org,2002:timestamp', {
- kind: 'scalar',
- resolve: resolveYamlTimestamp,
- construct: constructYamlTimestamp,
- instanceOf: Date,
- represent: representYamlTimestamp
-});
-
-function resolveYamlMerge(data) {
- return data === '<<' || data === null;
-}
-
-var merge = new type('tag:yaml.org,2002:merge', {
- kind: 'scalar',
- resolve: resolveYamlMerge
-});
-
-/*eslint-disable no-bitwise*/
-
-
-
-
-
-// [ 64, 65, 66 ] -> [ padding, CR, LF ]
-var BASE64_MAP = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=\n\r';
-
-
-function resolveYamlBinary(data) {
- if (data === null) return false;
-
- var code, idx, bitlen = 0, max = data.length, map = BASE64_MAP;
-
- // Convert one by one.
- for (idx = 0; idx < max; idx++) {
- code = map.indexOf(data.charAt(idx));
-
- // Skip CR/LF
- if (code > 64) continue;
-
- // Fail on illegal characters
- if (code < 0) return false;
-
- bitlen += 6;
- }
-
- // If there are any bits left, source was corrupted
- return (bitlen % 8) === 0;
-}
-
-function constructYamlBinary(data) {
- var idx, tailbits,
- input = data.replace(/[\r\n=]/g, ''), // remove CR/LF & padding to simplify scan
- max = input.length,
- map = BASE64_MAP,
- bits = 0,
- result = [];
-
- // Collect by 6*4 bits (3 bytes)
-
- for (idx = 0; idx < max; idx++) {
- if ((idx % 4 === 0) && idx) {
- result.push((bits >> 16) & 0xFF);
- result.push((bits >> 8) & 0xFF);
- result.push(bits & 0xFF);
- }
-
- bits = (bits << 6) | map.indexOf(input.charAt(idx));
- }
-
- // Dump tail
-
- tailbits = (max % 4) * 6;
-
- if (tailbits === 0) {
- result.push((bits >> 16) & 0xFF);
- result.push((bits >> 8) & 0xFF);
- result.push(bits & 0xFF);
- } else if (tailbits === 18) {
- result.push((bits >> 10) & 0xFF);
- result.push((bits >> 2) & 0xFF);
- } else if (tailbits === 12) {
- result.push((bits >> 4) & 0xFF);
- }
-
- return new Uint8Array(result);
-}
-
-function representYamlBinary(object /*, style*/) {
- var result = '', bits = 0, idx, tail,
- max = object.length,
- map = BASE64_MAP;
-
- // Convert every three bytes to 4 ASCII characters.
-
- for (idx = 0; idx < max; idx++) {
- if ((idx % 3 === 0) && idx) {
- result += map[(bits >> 18) & 0x3F];
- result += map[(bits >> 12) & 0x3F];
- result += map[(bits >> 6) & 0x3F];
- result += map[bits & 0x3F];
- }
-
- bits = (bits << 8) + object[idx];
- }
-
- // Dump tail
-
- tail = max % 3;
-
- if (tail === 0) {
- result += map[(bits >> 18) & 0x3F];
- result += map[(bits >> 12) & 0x3F];
- result += map[(bits >> 6) & 0x3F];
- result += map[bits & 0x3F];
- } else if (tail === 2) {
- result += map[(bits >> 10) & 0x3F];
- result += map[(bits >> 4) & 0x3F];
- result += map[(bits << 2) & 0x3F];
- result += map[64];
- } else if (tail === 1) {
- result += map[(bits >> 2) & 0x3F];
- result += map[(bits << 4) & 0x3F];
- result += map[64];
- result += map[64];
- }
-
- return result;
-}
-
-function isBinary(obj) {
- return Object.prototype.toString.call(obj) === '[object Uint8Array]';
-}
-
-var binary = new type('tag:yaml.org,2002:binary', {
- kind: 'scalar',
- resolve: resolveYamlBinary,
- construct: constructYamlBinary,
- predicate: isBinary,
- represent: representYamlBinary
-});
-
-var _hasOwnProperty$3 = Object.prototype.hasOwnProperty;
-var _toString$2 = Object.prototype.toString;
-
-function resolveYamlOmap(data) {
- if (data === null) return true;
-
- var objectKeys = [], index, length, pair, pairKey, pairHasKey,
- object = data;
-
- for (index = 0, length = object.length; index < length; index += 1) {
- pair = object[index];
- pairHasKey = false;
-
- if (_toString$2.call(pair) !== '[object Object]') return false;
-
- for (pairKey in pair) {
- if (_hasOwnProperty$3.call(pair, pairKey)) {
- if (!pairHasKey) pairHasKey = true;
- else return false;
- }
- }
-
- if (!pairHasKey) return false;
-
- if (objectKeys.indexOf(pairKey) === -1) objectKeys.push(pairKey);
- else return false;
- }
-
- return true;
-}
-
-function constructYamlOmap(data) {
- return data !== null ? data : [];
-}
-
-var omap = new type('tag:yaml.org,2002:omap', {
- kind: 'sequence',
- resolve: resolveYamlOmap,
- construct: constructYamlOmap
-});
-
-var _toString$1 = Object.prototype.toString;
-
-function resolveYamlPairs(data) {
- if (data === null) return true;
-
- var index, length, pair, keys, result,
- object = data;
-
- result = new Array(object.length);
-
- for (index = 0, length = object.length; index < length; index += 1) {
- pair = object[index];
-
- if (_toString$1.call(pair) !== '[object Object]') return false;
-
- keys = Object.keys(pair);
-
- if (keys.length !== 1) return false;
-
- result[index] = [ keys[0], pair[keys[0]] ];
- }
-
- return true;
-}
-
-function constructYamlPairs(data) {
- if (data === null) return [];
-
- var index, length, pair, keys, result,
- object = data;
-
- result = new Array(object.length);
-
- for (index = 0, length = object.length; index < length; index += 1) {
- pair = object[index];
-
- keys = Object.keys(pair);
-
- result[index] = [ keys[0], pair[keys[0]] ];
- }
-
- return result;
-}
-
-var pairs = new type('tag:yaml.org,2002:pairs', {
- kind: 'sequence',
- resolve: resolveYamlPairs,
- construct: constructYamlPairs
-});
-
-var _hasOwnProperty$2 = Object.prototype.hasOwnProperty;
-
-function resolveYamlSet(data) {
- if (data === null) return true;
-
- var key, object = data;
-
- for (key in object) {
- if (_hasOwnProperty$2.call(object, key)) {
- if (object[key] !== null) return false;
- }
- }
-
- return true;
-}
-
-function constructYamlSet(data) {
- return data !== null ? data : {};
-}
-
-var set = new type('tag:yaml.org,2002:set', {
- kind: 'mapping',
- resolve: resolveYamlSet,
- construct: constructYamlSet
-});
-
-var _default = js_yaml_core.extend({
- implicit: [
- timestamp,
- merge
- ],
- explicit: [
- binary,
- omap,
- pairs,
- set
- ]
-});
-
-/*eslint-disable max-len,no-use-before-define*/
-
-
-
-
-
-
-
-var _hasOwnProperty$1 = Object.prototype.hasOwnProperty;
-
-
-var CONTEXT_FLOW_IN = 1;
-var CONTEXT_FLOW_OUT = 2;
-var CONTEXT_BLOCK_IN = 3;
-var CONTEXT_BLOCK_OUT = 4;
-
-
-var CHOMPING_CLIP = 1;
-var CHOMPING_STRIP = 2;
-var CHOMPING_KEEP = 3;
-
-
-var PATTERN_NON_PRINTABLE = /[\x00-\x08\x0B\x0C\x0E-\x1F\x7F-\x84\x86-\x9F\uFFFE\uFFFF]|[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF]/;
-var PATTERN_NON_ASCII_LINE_BREAKS = /[\x85\u2028\u2029]/;
-var PATTERN_FLOW_INDICATORS = /[,\[\]\{\}]/;
-var PATTERN_TAG_HANDLE = /^(?:!|!!|![a-z\-]+!)$/i;
-var PATTERN_TAG_URI = /^(?:!|[^,\[\]\{\}])(?:%[0-9a-f]{2}|[0-9a-z\-#;\/\?:@&=\+\$,_\.!~\*'\(\)\[\]])*$/i;
-
-
-function _class(obj) { return Object.prototype.toString.call(obj); }
-
-function is_EOL(c) {
- return (c === 0x0A/* LF */) || (c === 0x0D/* CR */);
-}
-
-function is_WHITE_SPACE(c) {
- return (c === 0x09/* Tab */) || (c === 0x20/* Space */);
-}
-
-function is_WS_OR_EOL(c) {
- return (c === 0x09/* Tab */) ||
- (c === 0x20/* Space */) ||
- (c === 0x0A/* LF */) ||
- (c === 0x0D/* CR */);
-}
-
-function is_FLOW_INDICATOR(c) {
- return c === 0x2C/* , */ ||
- c === 0x5B/* [ */ ||
- c === 0x5D/* ] */ ||
- c === 0x7B/* { */ ||
- c === 0x7D/* } */;
-}
-
-function fromHexCode(c) {
- var lc;
-
- if ((0x30/* 0 */ <= c) && (c <= 0x39/* 9 */)) {
- return c - 0x30;
- }
-
- /*eslint-disable no-bitwise*/
- lc = c | 0x20;
-
- if ((0x61/* a */ <= lc) && (lc <= 0x66/* f */)) {
- return lc - 0x61 + 10;
- }
-
- return -1;
-}
-
-function escapedHexLen(c) {
- if (c === 0x78/* x */) { return 2; }
- if (c === 0x75/* u */) { return 4; }
- if (c === 0x55/* U */) { return 8; }
- return 0;
-}
-
-function fromDecimalCode(c) {
- if ((0x30/* 0 */ <= c) && (c <= 0x39/* 9 */)) {
- return c - 0x30;
- }
-
- return -1;
-}
-
-function simpleEscapeSequence(c) {
- /* eslint-disable indent */
- return (c === 0x30/* 0 */) ? '\x00' :
- (c === 0x61/* a */) ? '\x07' :
- (c === 0x62/* b */) ? '\x08' :
- (c === 0x74/* t */) ? '\x09' :
- (c === 0x09/* Tab */) ? '\x09' :
- (c === 0x6E/* n */) ? '\x0A' :
- (c === 0x76/* v */) ? '\x0B' :
- (c === 0x66/* f */) ? '\x0C' :
- (c === 0x72/* r */) ? '\x0D' :
- (c === 0x65/* e */) ? '\x1B' :
- (c === 0x20/* Space */) ? ' ' :
- (c === 0x22/* " */) ? '\x22' :
- (c === 0x2F/* / */) ? '/' :
- (c === 0x5C/* \ */) ? '\x5C' :
- (c === 0x4E/* N */) ? '\x85' :
- (c === 0x5F/* _ */) ? '\xA0' :
- (c === 0x4C/* L */) ? '\u2028' :
- (c === 0x50/* P */) ? '\u2029' : '';
-}
-
-function charFromCodepoint(c) {
- if (c <= 0xFFFF) {
- return String.fromCharCode(c);
- }
- // Encode UTF-16 surrogate pair
- // https://en.wikipedia.org/wiki/UTF-16#Code_points_U.2B010000_to_U.2B10FFFF
- return String.fromCharCode(
- ((c - 0x010000) >> 10) + 0xD800,
- ((c - 0x010000) & 0x03FF) + 0xDC00
- );
-}
-
-var simpleEscapeCheck = new Array(256); // integer, for fast access
-var simpleEscapeMap = new Array(256);
-for (var i = 0; i < 256; i++) {
- simpleEscapeCheck[i] = simpleEscapeSequence(i) ? 1 : 0;
- simpleEscapeMap[i] = simpleEscapeSequence(i);
-}
-
-
-function State$1(input, options) {
- this.input = input;
-
- this.filename = options['filename'] || null;
- this.schema = options['schema'] || _default;
- this.onWarning = options['onWarning'] || null;
- // (Hidden) Remove? makes the loader to expect YAML 1.1 documents
- // if such documents have no explicit %YAML directive
- this.legacy = options['legacy'] || false;
-
- this.json = options['json'] || false;
- this.listener = options['listener'] || null;
-
- this.implicitTypes = this.schema.compiledImplicit;
- this.typeMap = this.schema.compiledTypeMap;
-
- this.length = input.length;
- this.position = 0;
- this.line = 0;
- this.lineStart = 0;
- this.lineIndent = 0;
-
- // position of first leading tab in the current line,
- // used to make sure there are no tabs in the indentation
- this.firstTabInLine = -1;
-
- this.documents = [];
-
- /*
- this.version;
- this.checkLineBreaks;
- this.tagMap;
- this.anchorMap;
- this.tag;
- this.anchor;
- this.kind;
- this.result;*/
-
-}
-
-
-function generateError(state, message) {
- var mark = {
- name: state.filename,
- buffer: state.input.slice(0, -1), // omit trailing \0
- position: state.position,
- line: state.line,
- column: state.position - state.lineStart
- };
-
- mark.snippet = snippet(mark);
-
- return new exception(message, mark);
-}
-
-function throwError(state, message) {
- throw generateError(state, message);
-}
-
-function throwWarning(state, message) {
- if (state.onWarning) {
- state.onWarning.call(null, generateError(state, message));
- }
-}
-
-
-var directiveHandlers = {
-
- YAML: function handleYamlDirective(state, name, args) {
-
- var match, major, minor;
-
- if (state.version !== null) {
- throwError(state, 'duplication of %YAML directive');
- }
-
- if (args.length !== 1) {
- throwError(state, 'YAML directive accepts exactly one argument');
- }
-
- match = /^([0-9]+)\.([0-9]+)$/.exec(args[0]);
-
- if (match === null) {
- throwError(state, 'ill-formed argument of the YAML directive');
- }
-
- major = parseInt(match[1], 10);
- minor = parseInt(match[2], 10);
-
- if (major !== 1) {
- throwError(state, 'unacceptable YAML version of the document');
- }
-
- state.version = args[0];
- state.checkLineBreaks = (minor < 2);
-
- if (minor !== 1 && minor !== 2) {
- throwWarning(state, 'unsupported YAML version of the document');
- }
- },
-
- TAG: function handleTagDirective(state, name, args) {
-
- var handle, prefix;
-
- if (args.length !== 2) {
- throwError(state, 'TAG directive accepts exactly two arguments');
- }
-
- handle = args[0];
- prefix = args[1];
-
- if (!PATTERN_TAG_HANDLE.test(handle)) {
- throwError(state, 'ill-formed tag handle (first argument) of the TAG directive');
- }
-
- if (_hasOwnProperty$1.call(state.tagMap, handle)) {
- throwError(state, 'there is a previously declared suffix for "' + handle + '" tag handle');
- }
-
- if (!PATTERN_TAG_URI.test(prefix)) {
- throwError(state, 'ill-formed tag prefix (second argument) of the TAG directive');
- }
-
- try {
- prefix = decodeURIComponent(prefix);
- } catch (err) {
- throwError(state, 'tag prefix is malformed: ' + prefix);
- }
-
- state.tagMap[handle] = prefix;
- }
-};
-
-
-function captureSegment(state, start, end, checkJson) {
- var _position, _length, _character, _result;
-
- if (start < end) {
- _result = state.input.slice(start, end);
-
- if (checkJson) {
- for (_position = 0, _length = _result.length; _position < _length; _position += 1) {
- _character = _result.charCodeAt(_position);
- if (!(_character === 0x09 ||
- (0x20 <= _character && _character <= 0x10FFFF))) {
- throwError(state, 'expected valid JSON character');
- }
- }
- } else if (PATTERN_NON_PRINTABLE.test(_result)) {
- throwError(state, 'the stream contains non-printable characters');
- }
-
- state.result += _result;
- }
-}
-
-function mergeMappings(state, destination, source, overridableKeys) {
- var sourceKeys, key, index, quantity;
-
- if (!common.isObject(source)) {
- throwError(state, 'cannot merge mappings; the provided source object is unacceptable');
- }
-
- sourceKeys = Object.keys(source);
-
- for (index = 0, quantity = sourceKeys.length; index < quantity; index += 1) {
- key = sourceKeys[index];
-
- if (!_hasOwnProperty$1.call(destination, key)) {
- destination[key] = source[key];
- overridableKeys[key] = true;
- }
- }
-}
-
-function storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, valueNode,
- startLine, startLineStart, startPos) {
-
- var index, quantity;
-
- // The output is a plain object here, so keys can only be strings.
- // We need to convert keyNode to a string, but doing so can hang the process
- // (deeply nested arrays that explode exponentially using aliases).
- if (Array.isArray(keyNode)) {
- keyNode = Array.prototype.slice.call(keyNode);
-
- for (index = 0, quantity = keyNode.length; index < quantity; index += 1) {
- if (Array.isArray(keyNode[index])) {
- throwError(state, 'nested arrays are not supported inside keys');
- }
-
- if (typeof keyNode === 'object' && _class(keyNode[index]) === '[object Object]') {
- keyNode[index] = '[object Object]';
- }
- }
- }
-
- // Avoid code execution in load() via toString property
- // (still use its own toString for arrays, timestamps,
- // and whatever user schema extensions happen to have @@toStringTag)
- if (typeof keyNode === 'object' && _class(keyNode) === '[object Object]') {
- keyNode = '[object Object]';
- }
-
-
- keyNode = String(keyNode);
-
- if (_result === null) {
- _result = {};
- }
-
- if (keyTag === 'tag:yaml.org,2002:merge') {
- if (Array.isArray(valueNode)) {
- for (index = 0, quantity = valueNode.length; index < quantity; index += 1) {
- mergeMappings(state, _result, valueNode[index], overridableKeys);
- }
- } else {
- mergeMappings(state, _result, valueNode, overridableKeys);
- }
- } else {
- if (!state.json &&
- !_hasOwnProperty$1.call(overridableKeys, keyNode) &&
- _hasOwnProperty$1.call(_result, keyNode)) {
- state.line = startLine || state.line;
- state.lineStart = startLineStart || state.lineStart;
- state.position = startPos || state.position;
- throwError(state, 'duplicated mapping key');
- }
-
- // used for this specific key only because Object.defineProperty is slow
- if (keyNode === '__proto__') {
- Object.defineProperty(_result, keyNode, {
- configurable: true,
- enumerable: true,
- writable: true,
- value: valueNode
- });
- } else {
- _result[keyNode] = valueNode;
- }
- delete overridableKeys[keyNode];
- }
-
- return _result;
-}
-
-function readLineBreak(state) {
- var ch;
-
- ch = state.input.charCodeAt(state.position);
-
- if (ch === 0x0A/* LF */) {
- state.position++;
- } else if (ch === 0x0D/* CR */) {
- state.position++;
- if (state.input.charCodeAt(state.position) === 0x0A/* LF */) {
- state.position++;
- }
- } else {
- throwError(state, 'a line break is expected');
- }
-
- state.line += 1;
- state.lineStart = state.position;
- state.firstTabInLine = -1;
-}
-
-function skipSeparationSpace(state, allowComments, checkIndent) {
- var lineBreaks = 0,
- ch = state.input.charCodeAt(state.position);
-
- while (ch !== 0) {
- while (is_WHITE_SPACE(ch)) {
- if (ch === 0x09/* Tab */ && state.firstTabInLine === -1) {
- state.firstTabInLine = state.position;
- }
- ch = state.input.charCodeAt(++state.position);
- }
-
- if (allowComments && ch === 0x23/* # */) {
- do {
- ch = state.input.charCodeAt(++state.position);
- } while (ch !== 0x0A/* LF */ && ch !== 0x0D/* CR */ && ch !== 0);
- }
-
- if (is_EOL(ch)) {
- readLineBreak(state);
-
- ch = state.input.charCodeAt(state.position);
- lineBreaks++;
- state.lineIndent = 0;
-
- while (ch === 0x20/* Space */) {
- state.lineIndent++;
- ch = state.input.charCodeAt(++state.position);
- }
- } else {
- break;
- }
- }
-
- if (checkIndent !== -1 && lineBreaks !== 0 && state.lineIndent < checkIndent) {
- throwWarning(state, 'deficient indentation');
- }
-
- return lineBreaks;
-}
-
-function testDocumentSeparator(state) {
- var _position = state.position,
- ch;
-
- ch = state.input.charCodeAt(_position);
-
- // Condition state.position === state.lineStart is tested
- // in parent on each call, for efficiency. No needs to test here again.
- if ((ch === 0x2D/* - */ || ch === 0x2E/* . */) &&
- ch === state.input.charCodeAt(_position + 1) &&
- ch === state.input.charCodeAt(_position + 2)) {
-
- _position += 3;
-
- ch = state.input.charCodeAt(_position);
-
- if (ch === 0 || is_WS_OR_EOL(ch)) {
- return true;
- }
- }
-
- return false;
-}
-
-function writeFoldedLines(state, count) {
- if (count === 1) {
- state.result += ' ';
- } else if (count > 1) {
- state.result += common.repeat('\n', count - 1);
- }
-}
-
-
-function readPlainScalar(state, nodeIndent, withinFlowCollection) {
- var preceding,
- following,
- captureStart,
- captureEnd,
- hasPendingContent,
- _line,
- _lineStart,
- _lineIndent,
- _kind = state.kind,
- _result = state.result,
- ch;
-
- ch = state.input.charCodeAt(state.position);
-
- if (is_WS_OR_EOL(ch) ||
- is_FLOW_INDICATOR(ch) ||
- ch === 0x23/* # */ ||
- ch === 0x26/* & */ ||
- ch === 0x2A/* * */ ||
- ch === 0x21/* ! */ ||
- ch === 0x7C/* | */ ||
- ch === 0x3E/* > */ ||
- ch === 0x27/* ' */ ||
- ch === 0x22/* " */ ||
- ch === 0x25/* % */ ||
- ch === 0x40/* @ */ ||
- ch === 0x60/* ` */) {
- return false;
- }
-
- if (ch === 0x3F/* ? */ || ch === 0x2D/* - */) {
- following = state.input.charCodeAt(state.position + 1);
-
- if (is_WS_OR_EOL(following) ||
- withinFlowCollection && is_FLOW_INDICATOR(following)) {
- return false;
- }
- }
-
- state.kind = 'scalar';
- state.result = '';
- captureStart = captureEnd = state.position;
- hasPendingContent = false;
-
- while (ch !== 0) {
- if (ch === 0x3A/* : */) {
- following = state.input.charCodeAt(state.position + 1);
-
- if (is_WS_OR_EOL(following) ||
- withinFlowCollection && is_FLOW_INDICATOR(following)) {
- break;
- }
-
- } else if (ch === 0x23/* # */) {
- preceding = state.input.charCodeAt(state.position - 1);
-
- if (is_WS_OR_EOL(preceding)) {
- break;
- }
-
- } else if ((state.position === state.lineStart && testDocumentSeparator(state)) ||
- withinFlowCollection && is_FLOW_INDICATOR(ch)) {
- break;
-
- } else if (is_EOL(ch)) {
- _line = state.line;
- _lineStart = state.lineStart;
- _lineIndent = state.lineIndent;
- skipSeparationSpace(state, false, -1);
-
- if (state.lineIndent >= nodeIndent) {
- hasPendingContent = true;
- ch = state.input.charCodeAt(state.position);
- continue;
- } else {
- state.position = captureEnd;
- state.line = _line;
- state.lineStart = _lineStart;
- state.lineIndent = _lineIndent;
- break;
- }
- }
-
- if (hasPendingContent) {
- captureSegment(state, captureStart, captureEnd, false);
- writeFoldedLines(state, state.line - _line);
- captureStart = captureEnd = state.position;
- hasPendingContent = false;
- }
-
- if (!is_WHITE_SPACE(ch)) {
- captureEnd = state.position + 1;
- }
-
- ch = state.input.charCodeAt(++state.position);
- }
-
- captureSegment(state, captureStart, captureEnd, false);
-
- if (state.result) {
- return true;
- }
-
- state.kind = _kind;
- state.result = _result;
- return false;
-}
-
-function readSingleQuotedScalar(state, nodeIndent) {
- var ch,
- captureStart, captureEnd;
-
- ch = state.input.charCodeAt(state.position);
-
- if (ch !== 0x27/* ' */) {
- return false;
- }
-
- state.kind = 'scalar';
- state.result = '';
- state.position++;
- captureStart = captureEnd = state.position;
-
- while ((ch = state.input.charCodeAt(state.position)) !== 0) {
- if (ch === 0x27/* ' */) {
- captureSegment(state, captureStart, state.position, true);
- ch = state.input.charCodeAt(++state.position);
-
- if (ch === 0x27/* ' */) {
- captureStart = state.position;
- state.position++;
- captureEnd = state.position;
- } else {
- return true;
- }
-
- } else if (is_EOL(ch)) {
- captureSegment(state, captureStart, captureEnd, true);
- writeFoldedLines(state, skipSeparationSpace(state, false, nodeIndent));
- captureStart = captureEnd = state.position;
-
- } else if (state.position === state.lineStart && testDocumentSeparator(state)) {
- throwError(state, 'unexpected end of the document within a single quoted scalar');
-
- } else {
- state.position++;
- captureEnd = state.position;
- }
- }
-
- throwError(state, 'unexpected end of the stream within a single quoted scalar');
-}
-
-function readDoubleQuotedScalar(state, nodeIndent) {
- var captureStart,
- captureEnd,
- hexLength,
- hexResult,
- tmp,
- ch;
-
- ch = state.input.charCodeAt(state.position);
-
- if (ch !== 0x22/* " */) {
- return false;
- }
-
- state.kind = 'scalar';
- state.result = '';
- state.position++;
- captureStart = captureEnd = state.position;
-
- while ((ch = state.input.charCodeAt(state.position)) !== 0) {
- if (ch === 0x22/* " */) {
- captureSegment(state, captureStart, state.position, true);
- state.position++;
- return true;
-
- } else if (ch === 0x5C/* \ */) {
- captureSegment(state, captureStart, state.position, true);
- ch = state.input.charCodeAt(++state.position);
-
- if (is_EOL(ch)) {
- skipSeparationSpace(state, false, nodeIndent);
-
- // TODO: rework to inline fn with no type cast?
- } else if (ch < 256 && simpleEscapeCheck[ch]) {
- state.result += simpleEscapeMap[ch];
- state.position++;
-
- } else if ((tmp = escapedHexLen(ch)) > 0) {
- hexLength = tmp;
- hexResult = 0;
-
- for (; hexLength > 0; hexLength--) {
- ch = state.input.charCodeAt(++state.position);
-
- if ((tmp = fromHexCode(ch)) >= 0) {
- hexResult = (hexResult << 4) + tmp;
-
- } else {
- throwError(state, 'expected hexadecimal character');
- }
- }
-
- state.result += charFromCodepoint(hexResult);
-
- state.position++;
-
- } else {
- throwError(state, 'unknown escape sequence');
- }
-
- captureStart = captureEnd = state.position;
-
- } else if (is_EOL(ch)) {
- captureSegment(state, captureStart, captureEnd, true);
- writeFoldedLines(state, skipSeparationSpace(state, false, nodeIndent));
- captureStart = captureEnd = state.position;
-
- } else if (state.position === state.lineStart && testDocumentSeparator(state)) {
- throwError(state, 'unexpected end of the document within a double quoted scalar');
-
- } else {
- state.position++;
- captureEnd = state.position;
- }
- }
-
- throwError(state, 'unexpected end of the stream within a double quoted scalar');
-}
-
-function readFlowCollection(state, nodeIndent) {
- var readNext = true,
- _line,
- _lineStart,
- _pos,
- _tag = state.tag,
- _result,
- _anchor = state.anchor,
- following,
- terminator,
- isPair,
- isExplicitPair,
- isMapping,
- overridableKeys = Object.create(null),
- keyNode,
- keyTag,
- valueNode,
- ch;
-
- ch = state.input.charCodeAt(state.position);
-
- if (ch === 0x5B/* [ */) {
- terminator = 0x5D;/* ] */
- isMapping = false;
- _result = [];
- } else if (ch === 0x7B/* { */) {
- terminator = 0x7D;/* } */
- isMapping = true;
- _result = {};
- } else {
- return false;
- }
-
- if (state.anchor !== null) {
- state.anchorMap[state.anchor] = _result;
- }
-
- ch = state.input.charCodeAt(++state.position);
-
- while (ch !== 0) {
- skipSeparationSpace(state, true, nodeIndent);
-
- ch = state.input.charCodeAt(state.position);
-
- if (ch === terminator) {
- state.position++;
- state.tag = _tag;
- state.anchor = _anchor;
- state.kind = isMapping ? 'mapping' : 'sequence';
- state.result = _result;
- return true;
- } else if (!readNext) {
- throwError(state, 'missed comma between flow collection entries');
- } else if (ch === 0x2C/* , */) {
- // "flow collection entries can never be completely empty", as per YAML 1.2, section 7.4
- throwError(state, "expected the node content, but found ','");
- }
-
- keyTag = keyNode = valueNode = null;
- isPair = isExplicitPair = false;
-
- if (ch === 0x3F/* ? */) {
- following = state.input.charCodeAt(state.position + 1);
-
- if (is_WS_OR_EOL(following)) {
- isPair = isExplicitPair = true;
- state.position++;
- skipSeparationSpace(state, true, nodeIndent);
- }
- }
-
- _line = state.line; // Save the current line.
- _lineStart = state.lineStart;
- _pos = state.position;
- composeNode(state, nodeIndent, CONTEXT_FLOW_IN, false, true);
- keyTag = state.tag;
- keyNode = state.result;
- skipSeparationSpace(state, true, nodeIndent);
-
- ch = state.input.charCodeAt(state.position);
-
- if ((isExplicitPair || state.line === _line) && ch === 0x3A/* : */) {
- isPair = true;
- ch = state.input.charCodeAt(++state.position);
- skipSeparationSpace(state, true, nodeIndent);
- composeNode(state, nodeIndent, CONTEXT_FLOW_IN, false, true);
- valueNode = state.result;
- }
-
- if (isMapping) {
- storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, valueNode, _line, _lineStart, _pos);
- } else if (isPair) {
- _result.push(storeMappingPair(state, null, overridableKeys, keyTag, keyNode, valueNode, _line, _lineStart, _pos));
- } else {
- _result.push(keyNode);
- }
-
- skipSeparationSpace(state, true, nodeIndent);
-
- ch = state.input.charCodeAt(state.position);
-
- if (ch === 0x2C/* , */) {
- readNext = true;
- ch = state.input.charCodeAt(++state.position);
- } else {
- readNext = false;
- }
- }
-
- throwError(state, 'unexpected end of the stream within a flow collection');
-}
-
-function readBlockScalar(state, nodeIndent) {
- var captureStart,
- folding,
- chomping = CHOMPING_CLIP,
- didReadContent = false,
- detectedIndent = false,
- textIndent = nodeIndent,
- emptyLines = 0,
- atMoreIndented = false,
- tmp,
- ch;
-
- ch = state.input.charCodeAt(state.position);
-
- if (ch === 0x7C/* | */) {
- folding = false;
- } else if (ch === 0x3E/* > */) {
- folding = true;
- } else {
- return false;
- }
-
- state.kind = 'scalar';
- state.result = '';
-
- while (ch !== 0) {
- ch = state.input.charCodeAt(++state.position);
-
- if (ch === 0x2B/* + */ || ch === 0x2D/* - */) {
- if (CHOMPING_CLIP === chomping) {
- chomping = (ch === 0x2B/* + */) ? CHOMPING_KEEP : CHOMPING_STRIP;
- } else {
- throwError(state, 'repeat of a chomping mode identifier');
- }
-
- } else if ((tmp = fromDecimalCode(ch)) >= 0) {
- if (tmp === 0) {
- throwError(state, 'bad explicit indentation width of a block scalar; it cannot be less than one');
- } else if (!detectedIndent) {
- textIndent = nodeIndent + tmp - 1;
- detectedIndent = true;
- } else {
- throwError(state, 'repeat of an indentation width identifier');
- }
-
- } else {
- break;
- }
- }
-
- if (is_WHITE_SPACE(ch)) {
- do { ch = state.input.charCodeAt(++state.position); }
- while (is_WHITE_SPACE(ch));
-
- if (ch === 0x23/* # */) {
- do { ch = state.input.charCodeAt(++state.position); }
- while (!is_EOL(ch) && (ch !== 0));
- }
- }
-
- while (ch !== 0) {
- readLineBreak(state);
- state.lineIndent = 0;
-
- ch = state.input.charCodeAt(state.position);
-
- while ((!detectedIndent || state.lineIndent < textIndent) &&
- (ch === 0x20/* Space */)) {
- state.lineIndent++;
- ch = state.input.charCodeAt(++state.position);
- }
-
- if (!detectedIndent && state.lineIndent > textIndent) {
- textIndent = state.lineIndent;
- }
-
- if (is_EOL(ch)) {
- emptyLines++;
- continue;
- }
-
- // End of the scalar.
- if (state.lineIndent < textIndent) {
-
- // Perform the chomping.
- if (chomping === CHOMPING_KEEP) {
- state.result += common.repeat('\n', didReadContent ? 1 + emptyLines : emptyLines);
- } else if (chomping === CHOMPING_CLIP) {
- if (didReadContent) { // i.e. only if the scalar is not empty.
- state.result += '\n';
- }
- }
-
-      // Break this `while` cycle and go to the function's epilogue.
- break;
- }
-
- // Folded style: use fancy rules to handle line breaks.
- if (folding) {
-
- // Lines starting with white space characters (more-indented lines) are not folded.
- if (is_WHITE_SPACE(ch)) {
- atMoreIndented = true;
- // except for the first content line (cf. Example 8.1)
- state.result += common.repeat('\n', didReadContent ? 1 + emptyLines : emptyLines);
-
- // End of more-indented block.
- } else if (atMoreIndented) {
- atMoreIndented = false;
- state.result += common.repeat('\n', emptyLines + 1);
-
- // Just one line break - perceive as the same line.
- } else if (emptyLines === 0) {
- if (didReadContent) { // i.e. only if we have already read some scalar content.
- state.result += ' ';
- }
-
- // Several line breaks - perceive as different lines.
- } else {
- state.result += common.repeat('\n', emptyLines);
- }
-
- // Literal style: just add exact number of line breaks between content lines.
- } else {
- // Keep all line breaks except the header line break.
- state.result += common.repeat('\n', didReadContent ? 1 + emptyLines : emptyLines);
- }
-
- didReadContent = true;
- detectedIndent = true;
- emptyLines = 0;
- captureStart = state.position;
-
- while (!is_EOL(ch) && (ch !== 0)) {
- ch = state.input.charCodeAt(++state.position);
- }
-
- captureSegment(state, captureStart, state.position, false);
- }
-
- return true;
-}
-
-function readBlockSequence(state, nodeIndent) {
- var _line,
- _tag = state.tag,
- _anchor = state.anchor,
- _result = [],
- following,
- detected = false,
- ch;
-
- // there is a leading tab before this token, so it can't be a block sequence/mapping;
- // it can still be flow sequence/mapping or a scalar
- if (state.firstTabInLine !== -1) return false;
-
- if (state.anchor !== null) {
- state.anchorMap[state.anchor] = _result;
- }
-
- ch = state.input.charCodeAt(state.position);
-
- while (ch !== 0) {
- if (state.firstTabInLine !== -1) {
- state.position = state.firstTabInLine;
- throwError(state, 'tab characters must not be used in indentation');
- }
-
- if (ch !== 0x2D/* - */) {
- break;
- }
-
- following = state.input.charCodeAt(state.position + 1);
-
- if (!is_WS_OR_EOL(following)) {
- break;
- }
-
- detected = true;
- state.position++;
-
- if (skipSeparationSpace(state, true, -1)) {
- if (state.lineIndent <= nodeIndent) {
- _result.push(null);
- ch = state.input.charCodeAt(state.position);
- continue;
- }
- }
-
- _line = state.line;
- composeNode(state, nodeIndent, CONTEXT_BLOCK_IN, false, true);
- _result.push(state.result);
- skipSeparationSpace(state, true, -1);
-
- ch = state.input.charCodeAt(state.position);
-
- if ((state.line === _line || state.lineIndent > nodeIndent) && (ch !== 0)) {
- throwError(state, 'bad indentation of a sequence entry');
- } else if (state.lineIndent < nodeIndent) {
- break;
- }
- }
-
- if (detected) {
- state.tag = _tag;
- state.anchor = _anchor;
- state.kind = 'sequence';
- state.result = _result;
- return true;
- }
- return false;
-}
-
-function readBlockMapping(state, nodeIndent, flowIndent) {
- var following,
- allowCompact,
- _line,
- _keyLine,
- _keyLineStart,
- _keyPos,
- _tag = state.tag,
- _anchor = state.anchor,
- _result = {},
- overridableKeys = Object.create(null),
- keyTag = null,
- keyNode = null,
- valueNode = null,
- atExplicitKey = false,
- detected = false,
- ch;
-
- // there is a leading tab before this token, so it can't be a block sequence/mapping;
- // it can still be flow sequence/mapping or a scalar
- if (state.firstTabInLine !== -1) return false;
-
- if (state.anchor !== null) {
- state.anchorMap[state.anchor] = _result;
- }
-
- ch = state.input.charCodeAt(state.position);
-
- while (ch !== 0) {
- if (!atExplicitKey && state.firstTabInLine !== -1) {
- state.position = state.firstTabInLine;
- throwError(state, 'tab characters must not be used in indentation');
- }
-
- following = state.input.charCodeAt(state.position + 1);
- _line = state.line; // Save the current line.
-
- //
- // Explicit notation case. There are two separate blocks:
- // first for the key (denoted by "?") and second for the value (denoted by ":")
- //
- if ((ch === 0x3F/* ? */ || ch === 0x3A/* : */) && is_WS_OR_EOL(following)) {
-
- if (ch === 0x3F/* ? */) {
- if (atExplicitKey) {
- storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, null, _keyLine, _keyLineStart, _keyPos);
- keyTag = keyNode = valueNode = null;
- }
-
- detected = true;
- atExplicitKey = true;
- allowCompact = true;
-
- } else if (atExplicitKey) {
- // i.e. 0x3A/* : */ === character after the explicit key.
- atExplicitKey = false;
- allowCompact = true;
-
- } else {
- throwError(state, 'incomplete explicit mapping pair; a key node is missed; or followed by a non-tabulated empty line');
- }
-
- state.position += 1;
- ch = following;
-
- //
- // Implicit notation case. Flow-style node as the key first, then ":", and the value.
- //
- } else {
- _keyLine = state.line;
- _keyLineStart = state.lineStart;
- _keyPos = state.position;
-
- if (!composeNode(state, flowIndent, CONTEXT_FLOW_OUT, false, true)) {
- // Neither implicit nor explicit notation.
- // Reading is done. Go to the epilogue.
- break;
- }
-
- if (state.line === _line) {
- ch = state.input.charCodeAt(state.position);
-
- while (is_WHITE_SPACE(ch)) {
- ch = state.input.charCodeAt(++state.position);
- }
-
- if (ch === 0x3A/* : */) {
- ch = state.input.charCodeAt(++state.position);
-
- if (!is_WS_OR_EOL(ch)) {
- throwError(state, 'a whitespace character is expected after the key-value separator within a block mapping');
- }
-
- if (atExplicitKey) {
- storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, null, _keyLine, _keyLineStart, _keyPos);
- keyTag = keyNode = valueNode = null;
- }
-
- detected = true;
- atExplicitKey = false;
- allowCompact = false;
- keyTag = state.tag;
- keyNode = state.result;
-
- } else if (detected) {
- throwError(state, 'can not read an implicit mapping pair; a colon is missed');
-
- } else {
- state.tag = _tag;
- state.anchor = _anchor;
- return true; // Keep the result of `composeNode`.
- }
-
- } else if (detected) {
- throwError(state, 'can not read a block mapping entry; a multiline key may not be an implicit key');
-
- } else {
- state.tag = _tag;
- state.anchor = _anchor;
- return true; // Keep the result of `composeNode`.
- }
- }
-
- //
- // Common reading code for both explicit and implicit notations.
- //
- if (state.line === _line || state.lineIndent > nodeIndent) {
- if (atExplicitKey) {
- _keyLine = state.line;
- _keyLineStart = state.lineStart;
- _keyPos = state.position;
- }
-
- if (composeNode(state, nodeIndent, CONTEXT_BLOCK_OUT, true, allowCompact)) {
- if (atExplicitKey) {
- keyNode = state.result;
- } else {
- valueNode = state.result;
- }
- }
-
- if (!atExplicitKey) {
- storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, valueNode, _keyLine, _keyLineStart, _keyPos);
- keyTag = keyNode = valueNode = null;
- }
-
- skipSeparationSpace(state, true, -1);
- ch = state.input.charCodeAt(state.position);
- }
-
- if ((state.line === _line || state.lineIndent > nodeIndent) && (ch !== 0)) {
- throwError(state, 'bad indentation of a mapping entry');
- } else if (state.lineIndent < nodeIndent) {
- break;
- }
- }
-
- //
- // Epilogue.
- //
-
- // Special case: last mapping's node contains only the key in explicit notation.
- if (atExplicitKey) {
- storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, null, _keyLine, _keyLineStart, _keyPos);
- }
-
- // Expose the resulting mapping.
- if (detected) {
- state.tag = _tag;
- state.anchor = _anchor;
- state.kind = 'mapping';
- state.result = _result;
- }
-
- return detected;
-}
-
-function readTagProperty(state) {
- var _position,
- isVerbatim = false,
- isNamed = false,
- tagHandle,
- tagName,
- ch;
-
- ch = state.input.charCodeAt(state.position);
-
- if (ch !== 0x21/* ! */) return false;
-
- if (state.tag !== null) {
- throwError(state, 'duplication of a tag property');
- }
-
- ch = state.input.charCodeAt(++state.position);
-
- if (ch === 0x3C/* < */) {
- isVerbatim = true;
- ch = state.input.charCodeAt(++state.position);
-
- } else if (ch === 0x21/* ! */) {
- isNamed = true;
- tagHandle = '!!';
- ch = state.input.charCodeAt(++state.position);
-
- } else {
- tagHandle = '!';
- }
-
- _position = state.position;
-
- if (isVerbatim) {
- do { ch = state.input.charCodeAt(++state.position); }
- while (ch !== 0 && ch !== 0x3E/* > */);
-
- if (state.position < state.length) {
- tagName = state.input.slice(_position, state.position);
- ch = state.input.charCodeAt(++state.position);
- } else {
- throwError(state, 'unexpected end of the stream within a verbatim tag');
- }
- } else {
- while (ch !== 0 && !is_WS_OR_EOL(ch)) {
-
- if (ch === 0x21/* ! */) {
- if (!isNamed) {
- tagHandle = state.input.slice(_position - 1, state.position + 1);
-
- if (!PATTERN_TAG_HANDLE.test(tagHandle)) {
- throwError(state, 'named tag handle cannot contain such characters');
- }
-
- isNamed = true;
- _position = state.position + 1;
- } else {
- throwError(state, 'tag suffix cannot contain exclamation marks');
- }
- }
-
- ch = state.input.charCodeAt(++state.position);
- }
-
- tagName = state.input.slice(_position, state.position);
-
- if (PATTERN_FLOW_INDICATORS.test(tagName)) {
- throwError(state, 'tag suffix cannot contain flow indicator characters');
- }
- }
-
- if (tagName && !PATTERN_TAG_URI.test(tagName)) {
- throwError(state, 'tag name cannot contain such characters: ' + tagName);
- }
-
- try {
- tagName = decodeURIComponent(tagName);
- } catch (err) {
- throwError(state, 'tag name is malformed: ' + tagName);
- }
-
- if (isVerbatim) {
- state.tag = tagName;
-
- } else if (_hasOwnProperty$1.call(state.tagMap, tagHandle)) {
- state.tag = state.tagMap[tagHandle] + tagName;
-
- } else if (tagHandle === '!') {
- state.tag = '!' + tagName;
-
- } else if (tagHandle === '!!') {
- state.tag = 'tag:yaml.org,2002:' + tagName;
-
- } else {
- throwError(state, 'undeclared tag handle "' + tagHandle + '"');
- }
-
- return true;
-}
-
-function readAnchorProperty(state) {
- var _position,
- ch;
-
- ch = state.input.charCodeAt(state.position);
-
- if (ch !== 0x26/* & */) return false;
-
- if (state.anchor !== null) {
- throwError(state, 'duplication of an anchor property');
- }
-
- ch = state.input.charCodeAt(++state.position);
- _position = state.position;
-
- while (ch !== 0 && !is_WS_OR_EOL(ch) && !is_FLOW_INDICATOR(ch)) {
- ch = state.input.charCodeAt(++state.position);
- }
-
- if (state.position === _position) {
- throwError(state, 'name of an anchor node must contain at least one character');
- }
-
- state.anchor = state.input.slice(_position, state.position);
- return true;
-}
-
-function readAlias(state) {
- var _position, alias,
- ch;
-
- ch = state.input.charCodeAt(state.position);
-
- if (ch !== 0x2A/* * */) return false;
-
- ch = state.input.charCodeAt(++state.position);
- _position = state.position;
-
- while (ch !== 0 && !is_WS_OR_EOL(ch) && !is_FLOW_INDICATOR(ch)) {
- ch = state.input.charCodeAt(++state.position);
- }
-
- if (state.position === _position) {
- throwError(state, 'name of an alias node must contain at least one character');
- }
-
- alias = state.input.slice(_position, state.position);
-
- if (!_hasOwnProperty$1.call(state.anchorMap, alias)) {
- throwError(state, 'unidentified alias "' + alias + '"');
- }
-
- state.result = state.anchorMap[alias];
- skipSeparationSpace(state, true, -1);
- return true;
-}
-
-function composeNode(state, parentIndent, nodeContext, allowToSeek, allowCompact) {
- var allowBlockStyles,
- allowBlockScalars,
- allowBlockCollections,
-      indentStatus = 1, // 1: this>parent, 0: this=parent, -1: this<parent
-      atNewLine  = false,
-      hasContent = false,
-      typeIndex,
-      typeQuantity,
-      typeList,
-      type,
-      flowIndent,
-      blockIndent;
-
-  if (state.listener !== null) {
-    state.listener('open', state);
-  }
-
-  state.tag    = null;
-  state.anchor = null;
-  state.kind   = null;
-  state.result = null;
-
-  allowBlockStyles = allowBlockScalars = allowBlockCollections =
-    CONTEXT_BLOCK_OUT === nodeContext ||
-    CONTEXT_BLOCK_IN  === nodeContext;
-
-  if (allowToSeek) {
-    if (skipSeparationSpace(state, true, -1)) {
-      atNewLine = true;
-
-      if (state.lineIndent > parentIndent) {
- indentStatus = 1;
- } else if (state.lineIndent === parentIndent) {
- indentStatus = 0;
- } else if (state.lineIndent < parentIndent) {
- indentStatus = -1;
- }
- }
- }
-
- if (indentStatus === 1) {
- while (readTagProperty(state) || readAnchorProperty(state)) {
- if (skipSeparationSpace(state, true, -1)) {
- atNewLine = true;
- allowBlockCollections = allowBlockStyles;
-
- if (state.lineIndent > parentIndent) {
- indentStatus = 1;
- } else if (state.lineIndent === parentIndent) {
- indentStatus = 0;
- } else if (state.lineIndent < parentIndent) {
- indentStatus = -1;
- }
- } else {
- allowBlockCollections = false;
- }
- }
- }
-
- if (allowBlockCollections) {
- allowBlockCollections = atNewLine || allowCompact;
- }
-
- if (indentStatus === 1 || CONTEXT_BLOCK_OUT === nodeContext) {
- if (CONTEXT_FLOW_IN === nodeContext || CONTEXT_FLOW_OUT === nodeContext) {
- flowIndent = parentIndent;
- } else {
- flowIndent = parentIndent + 1;
- }
-
- blockIndent = state.position - state.lineStart;
-
- if (indentStatus === 1) {
- if (allowBlockCollections &&
- (readBlockSequence(state, blockIndent) ||
- readBlockMapping(state, blockIndent, flowIndent)) ||
- readFlowCollection(state, flowIndent)) {
- hasContent = true;
- } else {
- if ((allowBlockScalars && readBlockScalar(state, flowIndent)) ||
- readSingleQuotedScalar(state, flowIndent) ||
- readDoubleQuotedScalar(state, flowIndent)) {
- hasContent = true;
-
- } else if (readAlias(state)) {
- hasContent = true;
-
- if (state.tag !== null || state.anchor !== null) {
- throwError(state, 'alias node should not have any properties');
- }
-
- } else if (readPlainScalar(state, flowIndent, CONTEXT_FLOW_IN === nodeContext)) {
- hasContent = true;
-
- if (state.tag === null) {
- state.tag = '?';
- }
- }
-
- if (state.anchor !== null) {
- state.anchorMap[state.anchor] = state.result;
- }
- }
- } else if (indentStatus === 0) {
- // Special case: block sequences are allowed to have same indentation level as the parent.
- // http://www.yaml.org/spec/1.2/spec.html#id2799784
- hasContent = allowBlockCollections && readBlockSequence(state, blockIndent);
- }
- }
-
- if (state.tag === null) {
- if (state.anchor !== null) {
- state.anchorMap[state.anchor] = state.result;
- }
-
- } else if (state.tag === '?') {
- // Implicit resolving is not allowed for non-scalar types, and '?'
- // non-specific tag is only automatically assigned to plain scalars.
- //
- // We only need to check kind conformity in case user explicitly assigns '?'
-    // tag, for example like this: "!<?> [0]"
- //
- if (state.result !== null && state.kind !== 'scalar') {
-      throwError(state, 'unacceptable node kind for !<?> tag; it should be "scalar", not "' + state.kind + '"');
- }
-
- for (typeIndex = 0, typeQuantity = state.implicitTypes.length; typeIndex < typeQuantity; typeIndex += 1) {
- type = state.implicitTypes[typeIndex];
-
- if (type.resolve(state.result)) { // `state.result` updated in resolver if matched
- state.result = type.construct(state.result);
- state.tag = type.tag;
- if (state.anchor !== null) {
- state.anchorMap[state.anchor] = state.result;
- }
- break;
- }
- }
- } else if (state.tag !== '!') {
- if (_hasOwnProperty$1.call(state.typeMap[state.kind || 'fallback'], state.tag)) {
- type = state.typeMap[state.kind || 'fallback'][state.tag];
- } else {
- // looking for multi type
- type = null;
- typeList = state.typeMap.multi[state.kind || 'fallback'];
-
- for (typeIndex = 0, typeQuantity = typeList.length; typeIndex < typeQuantity; typeIndex += 1) {
- if (state.tag.slice(0, typeList[typeIndex].tag.length) === typeList[typeIndex].tag) {
- type = typeList[typeIndex];
- break;
- }
- }
- }
-
- if (!type) {
- throwError(state, 'unknown tag !<' + state.tag + '>');
- }
-
- if (state.result !== null && type.kind !== state.kind) {
- throwError(state, 'unacceptable node kind for !<' + state.tag + '> tag; it should be "' + type.kind + '", not "' + state.kind + '"');
- }
-
- if (!type.resolve(state.result, state.tag)) { // `state.result` updated in resolver if matched
- throwError(state, 'cannot resolve a node with !<' + state.tag + '> explicit tag');
- } else {
- state.result = type.construct(state.result, state.tag);
- if (state.anchor !== null) {
- state.anchorMap[state.anchor] = state.result;
- }
- }
- }
-
- if (state.listener !== null) {
- state.listener('close', state);
- }
- return state.tag !== null || state.anchor !== null || hasContent;
-}
-
-function readDocument(state) {
- var documentStart = state.position,
- _position,
- directiveName,
- directiveArgs,
- hasDirectives = false,
- ch;
-
- state.version = null;
- state.checkLineBreaks = state.legacy;
- state.tagMap = Object.create(null);
- state.anchorMap = Object.create(null);
-
- while ((ch = state.input.charCodeAt(state.position)) !== 0) {
- skipSeparationSpace(state, true, -1);
-
- ch = state.input.charCodeAt(state.position);
-
- if (state.lineIndent > 0 || ch !== 0x25/* % */) {
- break;
- }
-
- hasDirectives = true;
- ch = state.input.charCodeAt(++state.position);
- _position = state.position;
-
- while (ch !== 0 && !is_WS_OR_EOL(ch)) {
- ch = state.input.charCodeAt(++state.position);
- }
-
- directiveName = state.input.slice(_position, state.position);
- directiveArgs = [];
-
- if (directiveName.length < 1) {
- throwError(state, 'directive name must not be less than one character in length');
- }
-
- while (ch !== 0) {
- while (is_WHITE_SPACE(ch)) {
- ch = state.input.charCodeAt(++state.position);
- }
-
- if (ch === 0x23/* # */) {
- do { ch = state.input.charCodeAt(++state.position); }
- while (ch !== 0 && !is_EOL(ch));
- break;
- }
-
- if (is_EOL(ch)) break;
-
- _position = state.position;
-
- while (ch !== 0 && !is_WS_OR_EOL(ch)) {
- ch = state.input.charCodeAt(++state.position);
- }
-
- directiveArgs.push(state.input.slice(_position, state.position));
- }
-
- if (ch !== 0) readLineBreak(state);
-
- if (_hasOwnProperty$1.call(directiveHandlers, directiveName)) {
- directiveHandlers[directiveName](state, directiveName, directiveArgs);
- } else {
- throwWarning(state, 'unknown document directive "' + directiveName + '"');
- }
- }
-
- skipSeparationSpace(state, true, -1);
-
- if (state.lineIndent === 0 &&
- state.input.charCodeAt(state.position) === 0x2D/* - */ &&
- state.input.charCodeAt(state.position + 1) === 0x2D/* - */ &&
- state.input.charCodeAt(state.position + 2) === 0x2D/* - */) {
- state.position += 3;
- skipSeparationSpace(state, true, -1);
-
- } else if (hasDirectives) {
- throwError(state, 'directives end mark is expected');
- }
-
- composeNode(state, state.lineIndent - 1, CONTEXT_BLOCK_OUT, false, true);
- skipSeparationSpace(state, true, -1);
-
- if (state.checkLineBreaks &&
- PATTERN_NON_ASCII_LINE_BREAKS.test(state.input.slice(documentStart, state.position))) {
- throwWarning(state, 'non-ASCII line breaks are interpreted as content');
- }
-
- state.documents.push(state.result);
-
- if (state.position === state.lineStart && testDocumentSeparator(state)) {
-
- if (state.input.charCodeAt(state.position) === 0x2E/* . */) {
- state.position += 3;
- skipSeparationSpace(state, true, -1);
- }
- return;
- }
-
- if (state.position < (state.length - 1)) {
- throwError(state, 'end of the stream or a document separator is expected');
- } else {
- return;
- }
-}
-
-
-function loadDocuments(input, options) {
- input = String(input);
- options = options || {};
-
- if (input.length !== 0) {
-
-    // Add trailing `\n` if it doesn't exist
- if (input.charCodeAt(input.length - 1) !== 0x0A/* LF */ &&
- input.charCodeAt(input.length - 1) !== 0x0D/* CR */) {
- input += '\n';
- }
-
- // Strip BOM
- if (input.charCodeAt(0) === 0xFEFF) {
- input = input.slice(1);
- }
- }
-
- var state = new State$1(input, options);
-
- var nullpos = input.indexOf('\0');
-
- if (nullpos !== -1) {
- state.position = nullpos;
- throwError(state, 'null byte is not allowed in input');
- }
-
- // Use 0 as string terminator. That significantly simplifies bounds check.
- state.input += '\0';
-
- while (state.input.charCodeAt(state.position) === 0x20/* Space */) {
- state.lineIndent += 1;
- state.position += 1;
- }
-
- while (state.position < (state.length - 1)) {
- readDocument(state);
- }
-
- return state.documents;
-}
-
-
-function loadAll$1(input, iterator, options) {
- if (iterator !== null && typeof iterator === 'object' && typeof options === 'undefined') {
- options = iterator;
- iterator = null;
- }
-
- var documents = loadDocuments(input, options);
-
- if (typeof iterator !== 'function') {
- return documents;
- }
-
- for (var index = 0, length = documents.length; index < length; index += 1) {
- iterator(documents[index]);
- }
-}
-
-
-function load$1(input, options) {
- var documents = loadDocuments(input, options);
-
- if (documents.length === 0) {
- /*eslint-disable no-undefined*/
- return undefined;
- } else if (documents.length === 1) {
- return documents[0];
- }
- throw new exception('expected a single document in the stream, but found more');
-}
-
-
-var loadAll_1 = loadAll$1;
-var load_1 = load$1;
-
-var loader = {
- loadAll: loadAll_1,
- load: load_1
-};
-
-/*eslint-disable no-use-before-define*/
-
-
-
-
-
-var _toString = Object.prototype.toString;
-var _hasOwnProperty = Object.prototype.hasOwnProperty;
-
-var CHAR_BOM = 0xFEFF;
-var CHAR_TAB = 0x09; /* Tab */
-var CHAR_LINE_FEED = 0x0A; /* LF */
-var CHAR_CARRIAGE_RETURN = 0x0D; /* CR */
-var CHAR_SPACE = 0x20; /* Space */
-var CHAR_EXCLAMATION = 0x21; /* ! */
-var CHAR_DOUBLE_QUOTE = 0x22; /* " */
-var CHAR_SHARP = 0x23; /* # */
-var CHAR_PERCENT = 0x25; /* % */
-var CHAR_AMPERSAND = 0x26; /* & */
-var CHAR_SINGLE_QUOTE = 0x27; /* ' */
-var CHAR_ASTERISK = 0x2A; /* * */
-var CHAR_COMMA = 0x2C; /* , */
-var CHAR_MINUS = 0x2D; /* - */
-var CHAR_COLON = 0x3A; /* : */
-var CHAR_EQUALS = 0x3D; /* = */
-var CHAR_GREATER_THAN = 0x3E; /* > */
-var CHAR_QUESTION = 0x3F; /* ? */
-var CHAR_COMMERCIAL_AT = 0x40; /* @ */
-var CHAR_LEFT_SQUARE_BRACKET = 0x5B; /* [ */
-var CHAR_RIGHT_SQUARE_BRACKET = 0x5D; /* ] */
-var CHAR_GRAVE_ACCENT = 0x60; /* ` */
-var CHAR_LEFT_CURLY_BRACKET = 0x7B; /* { */
-var CHAR_VERTICAL_LINE = 0x7C; /* | */
-var CHAR_RIGHT_CURLY_BRACKET = 0x7D; /* } */
-
-var ESCAPE_SEQUENCES = {};
-
-ESCAPE_SEQUENCES[0x00] = '\\0';
-ESCAPE_SEQUENCES[0x07] = '\\a';
-ESCAPE_SEQUENCES[0x08] = '\\b';
-ESCAPE_SEQUENCES[0x09] = '\\t';
-ESCAPE_SEQUENCES[0x0A] = '\\n';
-ESCAPE_SEQUENCES[0x0B] = '\\v';
-ESCAPE_SEQUENCES[0x0C] = '\\f';
-ESCAPE_SEQUENCES[0x0D] = '\\r';
-ESCAPE_SEQUENCES[0x1B] = '\\e';
-ESCAPE_SEQUENCES[0x22] = '\\"';
-ESCAPE_SEQUENCES[0x5C] = '\\\\';
-ESCAPE_SEQUENCES[0x85] = '\\N';
-ESCAPE_SEQUENCES[0xA0] = '\\_';
-ESCAPE_SEQUENCES[0x2028] = '\\L';
-ESCAPE_SEQUENCES[0x2029] = '\\P';
-
-var DEPRECATED_BOOLEANS_SYNTAX = [
- 'y', 'Y', 'yes', 'Yes', 'YES', 'on', 'On', 'ON',
- 'n', 'N', 'no', 'No', 'NO', 'off', 'Off', 'OFF'
-];
-
-var DEPRECATED_BASE60_SYNTAX = /^[-+]?[0-9_]+(?::[0-9_]+)+(?:\.[0-9_]*)?$/;
-
-function compileStyleMap(schema, map) {
- var result, keys, index, length, tag, style, type;
-
- if (map === null) return {};
-
- result = {};
- keys = Object.keys(map);
-
- for (index = 0, length = keys.length; index < length; index += 1) {
- tag = keys[index];
- style = String(map[tag]);
-
- if (tag.slice(0, 2) === '!!') {
- tag = 'tag:yaml.org,2002:' + tag.slice(2);
- }
- type = schema.compiledTypeMap['fallback'][tag];
-
- if (type && _hasOwnProperty.call(type.styleAliases, style)) {
- style = type.styleAliases[style];
- }
-
- result[tag] = style;
- }
-
- return result;
-}
-
-function encodeHex(character) {
- var string, handle, length;
-
- string = character.toString(16).toUpperCase();
-
- if (character <= 0xFF) {
- handle = 'x';
- length = 2;
- } else if (character <= 0xFFFF) {
- handle = 'u';
- length = 4;
- } else if (character <= 0xFFFFFFFF) {
- handle = 'U';
- length = 8;
- } else {
- throw new exception('code point within a string may not be greater than 0xFFFFFFFF');
- }
-
- return '\\' + handle + common.repeat('0', length - string.length) + string;
-}
-
-
-var QUOTING_TYPE_SINGLE = 1,
- QUOTING_TYPE_DOUBLE = 2;
-
-function State(options) {
- this.schema = options['schema'] || _default;
- this.indent = Math.max(1, (options['indent'] || 2));
- this.noArrayIndent = options['noArrayIndent'] || false;
- this.skipInvalid = options['skipInvalid'] || false;
- this.flowLevel = (common.isNothing(options['flowLevel']) ? -1 : options['flowLevel']);
- this.styleMap = compileStyleMap(this.schema, options['styles'] || null);
- this.sortKeys = options['sortKeys'] || false;
- this.lineWidth = options['lineWidth'] || 80;
- this.noRefs = options['noRefs'] || false;
- this.noCompatMode = options['noCompatMode'] || false;
- this.condenseFlow = options['condenseFlow'] || false;
- this.quotingType = options['quotingType'] === '"' ? QUOTING_TYPE_DOUBLE : QUOTING_TYPE_SINGLE;
- this.forceQuotes = options['forceQuotes'] || false;
- this.replacer = typeof options['replacer'] === 'function' ? options['replacer'] : null;
-
- this.implicitTypes = this.schema.compiledImplicit;
- this.explicitTypes = this.schema.compiledExplicit;
-
- this.tag = null;
- this.result = '';
-
- this.duplicates = [];
- this.usedDuplicates = null;
-}
-
-// Indents every line in a string. Empty lines (\n only) are not indented.
-function indentString(string, spaces) {
- var ind = common.repeat(' ', spaces),
- position = 0,
- next = -1,
- result = '',
- line,
- length = string.length;
-
- while (position < length) {
- next = string.indexOf('\n', position);
- if (next === -1) {
- line = string.slice(position);
- position = length;
- } else {
- line = string.slice(position, next + 1);
- position = next + 1;
- }
-
- if (line.length && line !== '\n') result += ind;
-
- result += line;
- }
-
- return result;
-}
-
-function generateNextLine(state, level) {
- return '\n' + common.repeat(' ', state.indent * level);
-}
-
-function testImplicitResolving(state, str) {
- var index, length, type;
-
- for (index = 0, length = state.implicitTypes.length; index < length; index += 1) {
- type = state.implicitTypes[index];
-
- if (type.resolve(str)) {
- return true;
- }
- }
-
- return false;
-}
-
-// [33] s-white ::= s-space | s-tab
-function isWhitespace(c) {
- return c === CHAR_SPACE || c === CHAR_TAB;
-}
-
-// Returns true if the character can be printed without escaping.
-// From YAML 1.2: "any allowed characters known to be non-printable
-// should also be escaped. [However,] This isn’t mandatory"
-// Derived from nb-char - \t - #x85 - #xA0 - #x2028 - #x2029.
-function isPrintable(c) {
- return (0x00020 <= c && c <= 0x00007E)
- || ((0x000A1 <= c && c <= 0x00D7FF) && c !== 0x2028 && c !== 0x2029)
- || ((0x0E000 <= c && c <= 0x00FFFD) && c !== CHAR_BOM)
- || (0x10000 <= c && c <= 0x10FFFF);
-}
-
-// [34] ns-char ::= nb-char - s-white
-// [27] nb-char ::= c-printable - b-char - c-byte-order-mark
-// [26] b-char ::= b-line-feed | b-carriage-return
-// Including s-white (for some reason, examples don't match specs in this aspect)
-// ns-char ::= c-printable - b-line-feed - b-carriage-return - c-byte-order-mark
-function isNsCharOrWhitespace(c) {
- return isPrintable(c)
- && c !== CHAR_BOM
- // - b-char
- && c !== CHAR_CARRIAGE_RETURN
- && c !== CHAR_LINE_FEED;
-}
-
-// [127] ns-plain-safe(c) ::= c = flow-out ⇒ ns-plain-safe-out
-// c = flow-in ⇒ ns-plain-safe-in
-// c = block-key ⇒ ns-plain-safe-out
-// c = flow-key ⇒ ns-plain-safe-in
-// [128] ns-plain-safe-out ::= ns-char
-// [129] ns-plain-safe-in ::= ns-char - c-flow-indicator
-// [130] ns-plain-char(c) ::= ( ns-plain-safe(c) - “:” - “#” )
-// | ( /* An ns-char preceding */ “#” )
-// | ( “:” /* Followed by an ns-plain-safe(c) */ )
-function isPlainSafe(c, prev, inblock) {
- var cIsNsCharOrWhitespace = isNsCharOrWhitespace(c);
- var cIsNsChar = cIsNsCharOrWhitespace && !isWhitespace(c);
- return (
- // ns-plain-safe
- inblock ? // c = flow-in
- cIsNsCharOrWhitespace
- : cIsNsCharOrWhitespace
- // - c-flow-indicator
- && c !== CHAR_COMMA
- && c !== CHAR_LEFT_SQUARE_BRACKET
- && c !== CHAR_RIGHT_SQUARE_BRACKET
- && c !== CHAR_LEFT_CURLY_BRACKET
- && c !== CHAR_RIGHT_CURLY_BRACKET
- )
- // ns-plain-char
- && c !== CHAR_SHARP // false on '#'
- && !(prev === CHAR_COLON && !cIsNsChar) // false on ': '
- || (isNsCharOrWhitespace(prev) && !isWhitespace(prev) && c === CHAR_SHARP) // change to true on '[^ ]#'
- || (prev === CHAR_COLON && cIsNsChar); // change to true on ':[^ ]'
-}
-
-// Simplified test for values allowed as the first character in plain style.
-function isPlainSafeFirst(c) {
- // Uses a subset of ns-char - c-indicator
- // where ns-char = nb-char - s-white.
- // No support of ( ( “?” | “:” | “-” ) /* Followed by an ns-plain-safe(c)) */ ) part
- return isPrintable(c) && c !== CHAR_BOM
- && !isWhitespace(c) // - s-white
- // - (c-indicator ::=
- // “-” | “?” | “:” | “,” | “[” | “]” | “{” | “}”
- && c !== CHAR_MINUS
- && c !== CHAR_QUESTION
- && c !== CHAR_COLON
- && c !== CHAR_COMMA
- && c !== CHAR_LEFT_SQUARE_BRACKET
- && c !== CHAR_RIGHT_SQUARE_BRACKET
- && c !== CHAR_LEFT_CURLY_BRACKET
- && c !== CHAR_RIGHT_CURLY_BRACKET
- // | “#” | “&” | “*” | “!” | “|” | “=” | “>” | “'” | “"”
- && c !== CHAR_SHARP
- && c !== CHAR_AMPERSAND
- && c !== CHAR_ASTERISK
- && c !== CHAR_EXCLAMATION
- && c !== CHAR_VERTICAL_LINE
- && c !== CHAR_EQUALS
- && c !== CHAR_GREATER_THAN
- && c !== CHAR_SINGLE_QUOTE
- && c !== CHAR_DOUBLE_QUOTE
- // | “%” | “@” | “`”)
- && c !== CHAR_PERCENT
- && c !== CHAR_COMMERCIAL_AT
- && c !== CHAR_GRAVE_ACCENT;
-}
-
-// Simplified test for values allowed as the last character in plain style.
-function isPlainSafeLast(c) {
- // just not whitespace or colon, it will be checked to be plain character later
- return !isWhitespace(c) && c !== CHAR_COLON;
-}
-
-// Same as 'string'.codePointAt(pos), but works in older browsers.
-function codePointAt(string, pos) {
- var first = string.charCodeAt(pos), second;
- if (first >= 0xD800 && first <= 0xDBFF && pos + 1 < string.length) {
- second = string.charCodeAt(pos + 1);
- if (second >= 0xDC00 && second <= 0xDFFF) {
- // https://mathiasbynens.be/notes/javascript-encoding#surrogate-formulae
- return (first - 0xD800) * 0x400 + second - 0xDC00 + 0x10000;
- }
- }
- return first;
-}
-
-// Determines whether block indentation indicator is required.
-function needIndentIndicator(string) {
- var leadingSpaceRe = /^\n* /;
- return leadingSpaceRe.test(string);
-}
-
-var STYLE_PLAIN = 1,
- STYLE_SINGLE = 2,
- STYLE_LITERAL = 3,
- STYLE_FOLDED = 4,
- STYLE_DOUBLE = 5;
-
-// Determines which scalar styles are possible and returns the preferred style.
-// lineWidth = -1 => no limit.
-// Pre-conditions: str.length > 0.
-// Post-conditions:
-// STYLE_PLAIN or STYLE_SINGLE => no \n are in the string.
-// STYLE_LITERAL => no lines are suitable for folding (or lineWidth is -1).
-// STYLE_FOLDED => a line > lineWidth and can be folded (and lineWidth != -1).
-function chooseScalarStyle(string, singleLineOnly, indentPerLevel, lineWidth,
- testAmbiguousType, quotingType, forceQuotes, inblock) {
-
- var i;
- var char = 0;
- var prevChar = null;
- var hasLineBreak = false;
- var hasFoldableLine = false; // only checked if shouldTrackWidth
- var shouldTrackWidth = lineWidth !== -1;
- var previousLineBreak = -1; // count the first line correctly
- var plain = isPlainSafeFirst(codePointAt(string, 0))
- && isPlainSafeLast(codePointAt(string, string.length - 1));
-
- if (singleLineOnly || forceQuotes) {
- // Case: no block styles.
- // Check for disallowed characters to rule out plain and single.
- for (i = 0; i < string.length; char >= 0x10000 ? i += 2 : i++) {
- char = codePointAt(string, i);
- if (!isPrintable(char)) {
- return STYLE_DOUBLE;
- }
- plain = plain && isPlainSafe(char, prevChar, inblock);
- prevChar = char;
- }
- } else {
- // Case: block styles permitted.
- for (i = 0; i < string.length; char >= 0x10000 ? i += 2 : i++) {
- char = codePointAt(string, i);
- if (char === CHAR_LINE_FEED) {
- hasLineBreak = true;
- // Check if any line can be folded.
- if (shouldTrackWidth) {
- hasFoldableLine = hasFoldableLine ||
- // Foldable line = too long, and not more-indented.
- (i - previousLineBreak - 1 > lineWidth &&
- string[previousLineBreak + 1] !== ' ');
- previousLineBreak = i;
- }
- } else if (!isPrintable(char)) {
- return STYLE_DOUBLE;
- }
- plain = plain && isPlainSafe(char, prevChar, inblock);
- prevChar = char;
- }
- // in case the end is missing a \n
- hasFoldableLine = hasFoldableLine || (shouldTrackWidth &&
- (i - previousLineBreak - 1 > lineWidth &&
- string[previousLineBreak + 1] !== ' '));
- }
- // Although every style can represent \n without escaping, prefer block styles
- // for multiline, since they're more readable and they don't add empty lines.
- // Also prefer folding a super-long line.
- if (!hasLineBreak && !hasFoldableLine) {
- // Strings interpretable as another type have to be quoted;
- // e.g. the string 'true' vs. the boolean true.
- if (plain && !forceQuotes && !testAmbiguousType(string)) {
- return STYLE_PLAIN;
- }
- return quotingType === QUOTING_TYPE_DOUBLE ? STYLE_DOUBLE : STYLE_SINGLE;
- }
- // Edge case: block indentation indicator can only have one digit.
- if (indentPerLevel > 9 && needIndentIndicator(string)) {
- return STYLE_DOUBLE;
- }
- // At this point we know block styles are valid.
- // Prefer literal style unless we want to fold.
- if (!forceQuotes) {
- return hasFoldableLine ? STYLE_FOLDED : STYLE_LITERAL;
- }
- return quotingType === QUOTING_TYPE_DOUBLE ? STYLE_DOUBLE : STYLE_SINGLE;
-}
-
-// Note: line breaking/folding is implemented for only the folded style.
-// NB. We drop the last trailing newline (if any) of a returned block scalar
-// since the dumper adds its own newline. This always works:
-// • No ending newline => unaffected; already using strip "-" chomping.
-// • Ending newline => removed then restored.
-// Importantly, this keeps the "+" chomp indicator from gaining an extra line.
-function writeScalar(state, string, level, iskey, inblock) {
- state.dump = (function () {
- if (string.length === 0) {
- return state.quotingType === QUOTING_TYPE_DOUBLE ? '""' : "''";
- }
- if (!state.noCompatMode) {
- if (DEPRECATED_BOOLEANS_SYNTAX.indexOf(string) !== -1 || DEPRECATED_BASE60_SYNTAX.test(string)) {
- return state.quotingType === QUOTING_TYPE_DOUBLE ? ('"' + string + '"') : ("'" + string + "'");
- }
- }
-
- var indent = state.indent * Math.max(1, level); // no 0-indent scalars
- // As indentation gets deeper, let the width decrease monotonically
- // to the lower bound min(state.lineWidth, 40).
- // Note that this implies
- // state.lineWidth ≤ 40 + state.indent: width is fixed at the lower bound.
- // state.lineWidth > 40 + state.indent: width decreases until the lower bound.
- // This behaves better than a constant minimum width which disallows narrower options,
- // or an indent threshold which causes the width to suddenly increase.
- var lineWidth = state.lineWidth === -1
- ? -1 : Math.max(Math.min(state.lineWidth, 40), state.lineWidth - indent);
-
- // Without knowing if keys are implicit/explicit, assume implicit for safety.
- var singleLineOnly = iskey
- // No block styles in flow mode.
- || (state.flowLevel > -1 && level >= state.flowLevel);
- function testAmbiguity(string) {
- return testImplicitResolving(state, string);
- }
-
- switch (chooseScalarStyle(string, singleLineOnly, state.indent, lineWidth,
- testAmbiguity, state.quotingType, state.forceQuotes && !iskey, inblock)) {
-
- case STYLE_PLAIN:
- return string;
- case STYLE_SINGLE:
- return "'" + string.replace(/'/g, "''") + "'";
- case STYLE_LITERAL:
- return '|' + blockHeader(string, state.indent)
- + dropEndingNewline(indentString(string, indent));
- case STYLE_FOLDED:
- return '>' + blockHeader(string, state.indent)
- + dropEndingNewline(indentString(foldString(string, lineWidth), indent));
- case STYLE_DOUBLE:
- return '"' + escapeString(string) + '"';
- default:
- throw new exception('impossible error: invalid scalar style');
- }
- }());
-}
-
-// Pre-conditions: string is valid for a block scalar, 1 <= indentPerLevel <= 9.
-function blockHeader(string, indentPerLevel) {
- var indentIndicator = needIndentIndicator(string) ? String(indentPerLevel) : '';
-
- // note the special case: the string '\n' counts as a "trailing" empty line.
- var clip = string[string.length - 1] === '\n';
- var keep = clip && (string[string.length - 2] === '\n' || string === '\n');
- var chomp = keep ? '+' : (clip ? '' : '-');
-
- return indentIndicator + chomp + '\n';
-}
-
-// (See the note for writeScalar.)
-function dropEndingNewline(string) {
- return string[string.length - 1] === '\n' ? string.slice(0, -1) : string;
-}
-
-// Note: a long line without a suitable break point will exceed the width limit.
-// Pre-conditions: every char in str isPrintable, str.length > 0, width > 0.
-function foldString(string, width) {
- // In folded style, $k$ consecutive newlines output as $k+1$ newlines—
- // unless they're before or after a more-indented line, or at the very
- // beginning or end, in which case $k$ maps to $k$.
- // Therefore, parse each chunk as newline(s) followed by a content line.
- var lineRe = /(\n+)([^\n]*)/g;
-
- // first line (possibly an empty line)
- var result = (function () {
- var nextLF = string.indexOf('\n');
- nextLF = nextLF !== -1 ? nextLF : string.length;
- lineRe.lastIndex = nextLF;
- return foldLine(string.slice(0, nextLF), width);
- }());
- // If we haven't reached the first content line yet, don't add an extra \n.
- var prevMoreIndented = string[0] === '\n' || string[0] === ' ';
- var moreIndented;
-
- // rest of the lines
- var match;
- while ((match = lineRe.exec(string))) {
- var prefix = match[1], line = match[2];
- moreIndented = (line[0] === ' ');
- result += prefix
- + (!prevMoreIndented && !moreIndented && line !== ''
- ? '\n' : '')
- + foldLine(line, width);
- prevMoreIndented = moreIndented;
- }
-
- return result;
-}
-
-// Greedy line breaking.
-// Picks the longest line under the limit each time,
-// otherwise settles for the shortest line over the limit.
-// NB. More-indented lines *cannot* be folded, as that would add an extra \n.
-function foldLine(line, width) {
- if (line === '' || line[0] === ' ') return line;
-
- // Since a more-indented line adds a \n, breaks can't be followed by a space.
- var breakRe = / [^ ]/g; // note: the match index will always be <= length-2.
- var match;
- // start is an inclusive index. end, curr, and next are exclusive.
- var start = 0, end, curr = 0, next = 0;
- var result = '';
-
- // Invariants: 0 <= start <= length-1.
- // 0 <= curr <= next <= max(0, length-2). curr - start <= width.
- // Inside the loop:
- // A match implies length >= 2, so curr and next are <= length-2.
- while ((match = breakRe.exec(line))) {
- next = match.index;
- // maintain invariant: curr - start <= width
- if (next - start > width) {
- end = (curr > start) ? curr : next; // derive end <= length-2
- result += '\n' + line.slice(start, end);
- // skip the space that was output as \n
- start = end + 1; // derive start <= length-1
- }
- curr = next;
- }
-
- // By the invariants, start <= length-1, so there is something left over.
- // It is either the whole string or a part starting from non-whitespace.
- result += '\n';
- // Insert a break if the remainder is too long and there is a break available.
- if (line.length - start > width && curr > start) {
- result += line.slice(start, curr) + '\n' + line.slice(curr + 1);
- } else {
- result += line.slice(start);
- }
-
- return result.slice(1); // drop extra \n joiner
-}
-
-// Escapes a double-quoted string.
-function escapeString(string) {
- var result = '';
- var char = 0;
- var escapeSeq;
-
- for (var i = 0; i < string.length; char >= 0x10000 ? i += 2 : i++) {
- char = codePointAt(string, i);
- escapeSeq = ESCAPE_SEQUENCES[char];
-
- if (!escapeSeq && isPrintable(char)) {
- result += string[i];
- if (char >= 0x10000) result += string[i + 1];
- } else {
- result += escapeSeq || encodeHex(char);
- }
- }
-
- return result;
-}
-
-function writeFlowSequence(state, level, object) {
- var _result = '',
- _tag = state.tag,
- index,
- length,
- value;
-
- for (index = 0, length = object.length; index < length; index += 1) {
- value = object[index];
-
- if (state.replacer) {
- value = state.replacer.call(object, String(index), value);
- }
-
- // Write only valid elements, put null instead of invalid elements.
- if (writeNode(state, level, value, false, false) ||
- (typeof value === 'undefined' &&
- writeNode(state, level, null, false, false))) {
-
- if (_result !== '') _result += ',' + (!state.condenseFlow ? ' ' : '');
- _result += state.dump;
- }
- }
-
- state.tag = _tag;
- state.dump = '[' + _result + ']';
-}
-
-function writeBlockSequence(state, level, object, compact) {
- var _result = '',
- _tag = state.tag,
- index,
- length,
- value;
-
- for (index = 0, length = object.length; index < length; index += 1) {
- value = object[index];
-
- if (state.replacer) {
- value = state.replacer.call(object, String(index), value);
- }
-
- // Write only valid elements, put null instead of invalid elements.
- if (writeNode(state, level + 1, value, true, true, false, true) ||
- (typeof value === 'undefined' &&
- writeNode(state, level + 1, null, true, true, false, true))) {
-
- if (!compact || _result !== '') {
- _result += generateNextLine(state, level);
- }
-
- if (state.dump && CHAR_LINE_FEED === state.dump.charCodeAt(0)) {
- _result += '-';
- } else {
- _result += '- ';
- }
-
- _result += state.dump;
- }
- }
-
- state.tag = _tag;
- state.dump = _result || '[]'; // Empty sequence if no valid values.
-}
-
-function writeFlowMapping(state, level, object) {
- var _result = '',
- _tag = state.tag,
- objectKeyList = Object.keys(object),
- index,
- length,
- objectKey,
- objectValue,
- pairBuffer;
-
- for (index = 0, length = objectKeyList.length; index < length; index += 1) {
-
- pairBuffer = '';
- if (_result !== '') pairBuffer += ', ';
-
- if (state.condenseFlow) pairBuffer += '"';
-
- objectKey = objectKeyList[index];
- objectValue = object[objectKey];
-
- if (state.replacer) {
- objectValue = state.replacer.call(object, objectKey, objectValue);
- }
-
- if (!writeNode(state, level, objectKey, false, false)) {
- continue; // Skip this pair because of invalid key;
- }
-
- if (state.dump.length > 1024) pairBuffer += '? ';
-
- pairBuffer += state.dump + (state.condenseFlow ? '"' : '') + ':' + (state.condenseFlow ? '' : ' ');
-
- if (!writeNode(state, level, objectValue, false, false)) {
- continue; // Skip this pair because of invalid value.
- }
-
- pairBuffer += state.dump;
-
- // Both key and value are valid.
- _result += pairBuffer;
- }
-
- state.tag = _tag;
- state.dump = '{' + _result + '}';
-}
-
-function writeBlockMapping(state, level, object, compact) {
- var _result = '',
- _tag = state.tag,
- objectKeyList = Object.keys(object),
- index,
- length,
- objectKey,
- objectValue,
- explicitPair,
- pairBuffer;
-
- // Allow sorting keys so that the output file is deterministic
- if (state.sortKeys === true) {
- // Default sorting
- objectKeyList.sort();
- } else if (typeof state.sortKeys === 'function') {
- // Custom sort function
- objectKeyList.sort(state.sortKeys);
- } else if (state.sortKeys) {
- // Something is wrong
- throw new exception('sortKeys must be a boolean or a function');
- }
-
- for (index = 0, length = objectKeyList.length; index < length; index += 1) {
- pairBuffer = '';
-
- if (!compact || _result !== '') {
- pairBuffer += generateNextLine(state, level);
- }
-
- objectKey = objectKeyList[index];
- objectValue = object[objectKey];
-
- if (state.replacer) {
- objectValue = state.replacer.call(object, objectKey, objectValue);
- }
-
- if (!writeNode(state, level + 1, objectKey, true, true, true)) {
- continue; // Skip this pair because of invalid key.
- }
-
- explicitPair = (state.tag !== null && state.tag !== '?') ||
- (state.dump && state.dump.length > 1024);
-
- if (explicitPair) {
- if (state.dump && CHAR_LINE_FEED === state.dump.charCodeAt(0)) {
- pairBuffer += '?';
- } else {
- pairBuffer += '? ';
- }
- }
-
- pairBuffer += state.dump;
-
- if (explicitPair) {
- pairBuffer += generateNextLine(state, level);
- }
-
- if (!writeNode(state, level + 1, objectValue, true, explicitPair)) {
- continue; // Skip this pair because of invalid value.
- }
-
- if (state.dump && CHAR_LINE_FEED === state.dump.charCodeAt(0)) {
- pairBuffer += ':';
- } else {
- pairBuffer += ': ';
- }
-
- pairBuffer += state.dump;
-
- // Both key and value are valid.
- _result += pairBuffer;
- }
-
- state.tag = _tag;
- state.dump = _result || '{}'; // Empty mapping if no valid pairs.
-}
-
-function detectType(state, object, explicit) {
- var _result, typeList, index, length, type, style;
-
- typeList = explicit ? state.explicitTypes : state.implicitTypes;
-
- for (index = 0, length = typeList.length; index < length; index += 1) {
- type = typeList[index];
-
- if ((type.instanceOf || type.predicate) &&
- (!type.instanceOf || ((typeof object === 'object') && (object instanceof type.instanceOf))) &&
- (!type.predicate || type.predicate(object))) {
-
- if (explicit) {
- if (type.multi && type.representName) {
- state.tag = type.representName(object);
- } else {
- state.tag = type.tag;
- }
- } else {
- state.tag = '?';
- }
-
- if (type.represent) {
- style = state.styleMap[type.tag] || type.defaultStyle;
-
- if (_toString.call(type.represent) === '[object Function]') {
- _result = type.represent(object, style);
- } else if (_hasOwnProperty.call(type.represent, style)) {
- _result = type.represent[style](object, style);
- } else {
- throw new exception('!<' + type.tag + '> tag resolver accepts not "' + style + '" style');
- }
-
- state.dump = _result;
- }
-
- return true;
- }
- }
-
- return false;
-}
-
-// Serializes `object` and writes it to global `result`.
-// Returns true on success, or false on invalid object.
-//
-function writeNode(state, level, object, block, compact, iskey, isblockseq) {
- state.tag = null;
- state.dump = object;
-
- if (!detectType(state, object, false)) {
- detectType(state, object, true);
- }
-
- var type = _toString.call(state.dump);
- var inblock = block;
- var tagStr;
-
- if (block) {
- block = (state.flowLevel < 0 || state.flowLevel > level);
- }
-
- var objectOrArray = type === '[object Object]' || type === '[object Array]',
- duplicateIndex,
- duplicate;
-
- if (objectOrArray) {
- duplicateIndex = state.duplicates.indexOf(object);
- duplicate = duplicateIndex !== -1;
- }
-
- if ((state.tag !== null && state.tag !== '?') || duplicate || (state.indent !== 2 && level > 0)) {
- compact = false;
- }
-
- if (duplicate && state.usedDuplicates[duplicateIndex]) {
- state.dump = '*ref_' + duplicateIndex;
- } else {
- if (objectOrArray && duplicate && !state.usedDuplicates[duplicateIndex]) {
- state.usedDuplicates[duplicateIndex] = true;
- }
- if (type === '[object Object]') {
- if (block && (Object.keys(state.dump).length !== 0)) {
- writeBlockMapping(state, level, state.dump, compact);
- if (duplicate) {
- state.dump = '&ref_' + duplicateIndex + state.dump;
- }
- } else {
- writeFlowMapping(state, level, state.dump);
- if (duplicate) {
- state.dump = '&ref_' + duplicateIndex + ' ' + state.dump;
- }
- }
- } else if (type === '[object Array]') {
- if (block && (state.dump.length !== 0)) {
- if (state.noArrayIndent && !isblockseq && level > 0) {
- writeBlockSequence(state, level - 1, state.dump, compact);
- } else {
- writeBlockSequence(state, level, state.dump, compact);
- }
- if (duplicate) {
- state.dump = '&ref_' + duplicateIndex + state.dump;
- }
- } else {
- writeFlowSequence(state, level, state.dump);
- if (duplicate) {
- state.dump = '&ref_' + duplicateIndex + ' ' + state.dump;
- }
- }
- } else if (type === '[object String]') {
- if (state.tag !== '?') {
- writeScalar(state, state.dump, level, iskey, inblock);
- }
- } else if (type === '[object Undefined]') {
- return false;
- } else {
- if (state.skipInvalid) return false;
- throw new exception('unacceptable kind of an object to dump ' + type);
- }
-
- if (state.tag !== null && state.tag !== '?') {
- // Need to encode all characters except those allowed by the spec:
- //
- // [35] ns-dec-digit ::= [#x30-#x39] /* 0-9 */
- // [36] ns-hex-digit ::= ns-dec-digit
- // | [#x41-#x46] /* A-F */ | [#x61-#x66] /* a-f */
- // [37] ns-ascii-letter ::= [#x41-#x5A] /* A-Z */ | [#x61-#x7A] /* a-z */
- // [38] ns-word-char ::= ns-dec-digit | ns-ascii-letter | “-”
- // [39] ns-uri-char ::= “%” ns-hex-digit ns-hex-digit | ns-word-char | “#”
- // | “;” | “/” | “?” | “:” | “@” | “&” | “=” | “+” | “$” | “,”
- // | “_” | “.” | “!” | “~” | “*” | “'” | “(” | “)” | “[” | “]”
- //
- // Also need to encode '!' because it has special meaning (end of tag prefix).
- //
- tagStr = encodeURI(
- state.tag[0] === '!' ? state.tag.slice(1) : state.tag
- ).replace(/!/g, '%21');
-
- if (state.tag[0] === '!') {
- tagStr = '!' + tagStr;
- } else if (tagStr.slice(0, 18) === 'tag:yaml.org,2002:') {
- tagStr = '!!' + tagStr.slice(18);
- } else {
- tagStr = '!<' + tagStr + '>';
- }
-
- state.dump = tagStr + ' ' + state.dump;
- }
- }
-
- return true;
-}
-
-function getDuplicateReferences(object, state) {
- var objects = [],
- duplicatesIndexes = [],
- index,
- length;
-
- inspectNode(object, objects, duplicatesIndexes);
-
- for (index = 0, length = duplicatesIndexes.length; index < length; index += 1) {
- state.duplicates.push(objects[duplicatesIndexes[index]]);
- }
- state.usedDuplicates = new Array(length);
-}
-
-function inspectNode(object, objects, duplicatesIndexes) {
- var objectKeyList,
- index,
- length;
-
- if (object !== null && typeof object === 'object') {
- index = objects.indexOf(object);
- if (index !== -1) {
- if (duplicatesIndexes.indexOf(index) === -1) {
- duplicatesIndexes.push(index);
- }
- } else {
- objects.push(object);
-
- if (Array.isArray(object)) {
- for (index = 0, length = object.length; index < length; index += 1) {
- inspectNode(object[index], objects, duplicatesIndexes);
- }
- } else {
- objectKeyList = Object.keys(object);
-
- for (index = 0, length = objectKeyList.length; index < length; index += 1) {
- inspectNode(object[objectKeyList[index]], objects, duplicatesIndexes);
- }
- }
- }
- }
-}
-
-function dump$1(input, options) {
- options = options || {};
-
- var state = new State(options);
-
- if (!state.noRefs) getDuplicateReferences(input, state);
-
- var value = input;
-
- if (state.replacer) {
- value = state.replacer.call({ '': value }, '', value);
- }
-
- if (writeNode(state, 0, value, true, true)) return state.dump + '\n';
-
- return '';
-}
-
-var dump_1 = dump$1;
-
-var dumper = {
- dump: dump_1
-};
-
-function renamed(from, to) {
- return function () {
- throw new Error('Function yaml.' + from + ' is removed in js-yaml 4. ' +
- 'Use yaml.' + to + ' instead, which is now safe by default.');
- };
-}
-
-
-var Type = type;
-var Schema = schema;
-var FAILSAFE_SCHEMA = failsafe;
-var JSON_SCHEMA = json;
-var CORE_SCHEMA = js_yaml_core;
-var DEFAULT_SCHEMA = _default;
-var load = loader.load;
-var loadAll = loader.loadAll;
-var dump = dumper.dump;
-var YAMLException = exception;
-
-// Re-export all types in case user wants to create custom schema
-var types = {
- binary: binary,
- float: js_yaml_float,
- map: map,
- null: _null,
- pairs: pairs,
- set: set,
- timestamp: timestamp,
- bool: bool,
- int: js_yaml_int,
- merge: merge,
- omap: omap,
- seq: seq,
- str: str
-};
-
-// Removed functions from JS-YAML 3.0.x
-var safeLoad = renamed('safeLoad', 'load');
-var safeLoadAll = renamed('safeLoadAll', 'loadAll');
-var safeDump = renamed('safeDump', 'dump');
-
-var jsYaml = {
- Type: Type,
- Schema: Schema,
- FAILSAFE_SCHEMA: FAILSAFE_SCHEMA,
- JSON_SCHEMA: JSON_SCHEMA,
- CORE_SCHEMA: CORE_SCHEMA,
- DEFAULT_SCHEMA: DEFAULT_SCHEMA,
- load: load,
- loadAll: loadAll,
- dump: dump,
- YAMLException: YAMLException,
- types: types,
- safeLoad: safeLoad,
- safeLoadAll: safeLoadAll,
- safeDump: safeDump
-};
-
-/* harmony default export */ const js_yaml = (jsYaml);
-
-
-;// CONCATENATED MODULE: ./adapters/integrations/atlan-contract-impact-analysis-github.js
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-var headSHA;
-const integrationName = "GITHUB_CONTRACT_IMPACT_ANALYSIS";
-const actionName = "contract_ci_action"
-const utmSource = "dbt_github_action"
-
-class ContractIntegration extends IntegrationInterface {
- constructor(token) {
- super(token);
- }
-
- async run() {
- try {
- const timeStart = Date.now();
- const { context } = github;
-
- const octokit = github.getOctokit(this.token);
- const { pull_request } = context?.payload;
- const { state, merged } = pull_request;
- headSHA = pull_request?.head?.sha;
-
- logger_logger.withInfo(
- "GITHUB_CONTRACT_IMPACT_ANALYSIS is running...",
- integrationName,
- headSHA,
- "run"
- );
-
-
- if (!(await this.authIntegration({ octokit, context }))) {
- logger_logger.withError(
- "Authentication failed. Wrong API Token.",
- integrationName,
- headSHA,
- "run"
- );
- throw { message: "Wrong API Token" };
- }
-
- let total_assets = 0;
-
- if (state === "open") {
- total_assets = await this.printDownstreamAssets({ octokit, context });
- }
-
- if (total_assets !== 0) {
- await this.sendSegmentEventOfIntegration({
- action: `${actionName}_run`,
- properties: {
- asset_count: total_assets,
- total_time: Date.now() - timeStart,
- },
- });
- }
-
- logger_logger.withInfo(
- "Successfully Completed GITHUB_CONTRACT_IMPACT_ANALYSIS",
- integrationName,
- headSHA,
- "run"
- );
- } catch (error) {
- logger_logger.withError(
- `Error in run(): ${error.message}`,
- integrationName,
- headSHA,
- "run"
- );
- throw error;
- }
- }
-
- async printDownstreamAssets({ octokit, context }) {
- logger_logger.withInfo(
- "Printing downstream assets...",
- integrationName,
- headSHA,
- "printDownstreamAssets"
- );
-
- try {
- const changedFiles = await this.getChangedFiles({ octokit, context });
- let comments = ``;
- let warningComments = ``;
- let totalChangedFiles = 0;
-
- const atlanConfig = ATLAN_CONFIG;
-
- // Read atlan config file
- const config = this.readYamlFile(atlanConfig);
- if (config.error) {
- logger_logger.withError(
- `Failed to read atlan config file ${atlanConfig}: ${config.error}`,
- integrationName,
- headSHA,
- "printDownstreamAssets"
- );
- return;
- }
-
- let datasources = this.parseDatasourceFromConfig(config.contentYaml)
-
- // If no datasources found, do not proceed
- if (datasources.size <= 0) {
- logger_logger.withError(
- `No datasources found in atlan config ${atlanConfig}`,
- integrationName,
- headSHA,
- "printDownstreamAssets"
- );
- return;
- }
-
- for (const { fileName, filePath, status } of changedFiles) {
-        // Skip non-YAML files
- if (!filePath.endsWith('.yaml') && !filePath.endsWith('.yml')) {
- logger_logger.withInfo(
- `Skipping file: ${filePath}`,
- integrationName,
- headSHA,
- "printDownstreamAssets"
- );
- continue
- }
-
- logger_logger.withInfo(
- `Processing file: ${filePath}`,
- integrationName,
- headSHA,
- "printDownstreamAssets"
- );
-
- const contract = this.readYamlFile(filePath);
- if (contract.error) {
- logger_logger.withError(
- `Failed to read yaml file ${filePath}: ${contract.error}`,
- integrationName,
- headSHA,
- "printDownstreamAssets"
- );
- continue
- }
-
- let dataset = contract.contentYaml.dataset
-        // Skip YAML files that are not contracts
- if (!dataset) {
- continue
- }
-
- const assetQualifiedName = this.getQualifiedName(
- datasources,
- contract.contentYaml
- );
-
- if (assetQualifiedName === undefined) {
- logger_logger.withError(
- `Failed to construct asset qualified name for contract ${filePath}`,
- integrationName,
- headSHA,
- "printDownstreamAssets"
- );
- continue;
- }
-
- logger_logger.withInfo(
- `Generated asset qualified name ${assetQualifiedName} for contract ${filePath}`,
- integrationName,
- headSHA,
- "printDownstreamAssets"
- );
-
- // Fetch asset from Atlan
- const asset = await getContractAsset({
- dataset,
- assetQualifiedName: assetQualifiedName
- });
-
- if (asset.error) {
- logger_logger.withError(
- `Assets fetch error for ${dataset}: ${asset.error}`,
- integrationName,
- headSHA,
- "printDownstreamAssets"
- );
-
- this.sendSegmentEventOfIntegration({
- action: `${actionName}_failure`,
- properties: {
- reason: "failed_to_get_asset",
- asset_name: dataset,
- msg: asset.error,
- },
- });
-
- totalChangedFiles++
- warningComments += asset.comment;
- warningComments += "\n\n---\n\n"
- continue;
- }
-
- logger_logger.withInfo(
- `Processing asset: ${dataset}`,
- integrationName,
- headSHA,
- "printDownstreamAssets"
- );
-
- const timeStart = Date.now();
- const totalModifiedFiles = changedFiles.filter(
- (i) => i.status === "modified"
- ).length;
-
- // Fetch downstream assets
- const downstreamAssets = await getDownstreamAssets(
- asset,
- asset.guid,
- totalModifiedFiles,
- utmSource
- );
-
- if (downstreamAssets.error) {
- logger_logger.withError(
- `Downstream assets error for ${dataset}: ${downstreamAssets.error}`,
- integrationName,
- headSHA,
- "printDownstreamAssets"
- );
-
- this.sendSegmentEventOfIntegration({
- action: `${actionName}_failure`,
- properties: {
- reason: "failed_to_fetch_lineage",
- asset_guid: asset.guid,
- asset_name: asset.name,
- asset_typeName: asset.typeName,
- msg: downstreamAssets.error,
- },
- });
-
- totalChangedFiles++
- warningComments += downstreamAssets.comment;
- warningComments += "\n\n---\n\n"
- continue;
- }
-
- // Send segment event for successful downstream asset fetch
- this.sendSegmentEventOfIntegration({
- action: `${actionName}_downstream_unfurl`,
- properties: {
- asset_guid: asset.guid,
- asset_type: asset.typeName,
- downstream_count: downstreamAssets.entities.length,
- total_fetch_time: Date.now() - timeStart,
- },
- });
-
- // Fetch classification for asset
- const classifications = await getAssetClassifications()
-
- if (classifications.error) {
- logger_logger.withError(
-          `Failed to fetch classifications for ${dataset}: ${classifications.error}`,
- integrationName,
- headSHA,
- "printDownstreamAssets"
- );
-
- this.sendSegmentEventOfIntegration({
- action: `${actionName}_failure`,
- properties: {
- reason: "failed_to_get_classifications",
- msg: classifications.error,
- },
- });
- }
-
- // Construct comment for displaying downstream assets
- const comment = await this.renderDownstreamAssetsComment({
- asset,
- downstreamAssets,
- classifications,
- });
-
- comments += comment;
-
- if (comment.trim() !== "") {
- comments += "\n\n---\n\n";
- }
-
- totalChangedFiles++;
- }
-
- // Add header comment before asset info comments
- comments = getContractImpactAnalysisBaseComment(
- totalChangedFiles,
- comments,
- warningComments
- );
-
- const existingComment = await this.checkCommentExists({
- octokit,
- context,
- });
-
- logger_logger.withInfo(
- `Existing Comment: ${existingComment?.id}`,
- integrationName,
- headSHA,
- "printDownstreamAssets"
- );
-
- if (totalChangedFiles > 0)
- await this.createIssueComment({
- octokit,
- context,
- content: comments,
- comment_id: existingComment?.id,
- });
-
- if (totalChangedFiles === 0 && existingComment)
- await this.deleteComment({
- octokit,
- context,
- comment_id: existingComment?.id,
- });
-
- logger_logger.withInfo(
- "Successfully printed Downstream Assets",
- integrationName,
- headSHA,
- "printDownstreamAssets"
- );
-
- return totalChangedFiles;
- } catch (error) {
- logger_logger.withError(
- `Error in printDownstreamAssets: ${error.message}`,
- integrationName,
- headSHA,
- "printDownstreamAssets"
- );
- throw error;
- }
- }
-
- async authIntegration({ octokit, context }) {
- logger_logger.withInfo(
- "Authenticating with Atlan",
- integrationName,
- headSHA,
- "authIntegration"
- );
-
- try {
- const response = await auth();
-
- const existingComment = await this.checkCommentExists({
- octokit,
- context,
- });
-
- logger_logger.withInfo(
- `Existing Comment: ${existingComment?.id}`,
- integrationName,
- headSHA,
- "authIntegration"
- );
-
- if (response?.status === 401) {
- logger_logger.withError(
- "Authentication failed: Status 401",
- integrationName,
- headSHA,
- "authIntegration"
- );
- await this.createIssueComment({
- octokit,
- context,
- content: getErrorResponseStatus401(get_environment_variables_ATLAN_INSTANCE_URL, context),
- comment_id: existingComment?.id,
- });
- return false;
- }
-
- if (response === undefined) {
- logger_logger.withError(
- "Authentication failed: Undefined response",
- integrationName,
- headSHA,
- "authIntegration"
- );
- await this.createIssueComment({
- octokit,
- context,
- content: getErrorResponseStatusUndefined(get_environment_variables_ATLAN_INSTANCE_URL, context),
- comment_id: existingComment?.id,
- });
- return false;
- }
- logger_logger.withInfo(
- "Successfully Authenticated with Atlan",
- integrationName,
- headSHA,
- "authIntegration"
- );
- return true;
- } catch (error) {
- logger_logger.withError(
- `Error in authIntegration: ${error.message}`,
- integrationName,
- headSHA,
- "authIntegration"
- );
- throw error;
- }
- }
-
- async sendSegmentEventOfIntegration({ action, properties }) {
- try {
- const domain = new URL(get_environment_variables_ATLAN_INSTANCE_URL).hostname;
- const { context } = github; //confirm this
- logger_logger.withInfo(
- `Sending Segment event for action: ${action}`,
- integrationName,
- headSHA,
- "sendSegmentEventOfIntegration"
- );
-
- const raw = json_stringify_safe_stringify({
- category: "integration",
- object: "github",
- action,
- userId: "atlan-annonymous-github",
- properties: {
- ...properties,
- github_action_id: `https://github.com/${context?.payload?.repository?.full_name}/actions/runs/${context?.runId}`,
- domain,
- },
- });
-
- return sendSegmentEvent(action, raw);
- } catch (error) {
- logger_logger.withError(
- `Error sending Segment event for action: ${action} - ${error.message}`,
- integrationName,
- headSHA,
- "sendSegmentEventOfIntegration"
- );
- throw error;
- }
- }
-
- async getChangedFiles({ octokit, context }) {
- try {
- logger_logger.withInfo(
- "Fetching changed files...",
- integrationName,
- headSHA,
- "getChangedFiles"
- );
-
- const { repository, pull_request } = context.payload,
- owner = repository.owner.login,
- repo = repository.name,
- pull_number = pull_request.number;
-
- const res = await octokit.request(
- `GET /repos/${owner}/${repo}/pulls/${pull_number}/files`,
- {
- owner,
- repo,
- pull_number,
- }
- );
-
- var changedFiles = res.data
- .map(({ filename, status }) => {
- try {
- const isYamlFile = filename.match(/\.(yaml|yml)$/);
-
- if (isYamlFile) {
- const contractName = filename.split('/').pop().replace(/\.(yaml|yml)$/, '');
- return {
- fileName: contractName,
- filePath: filename,
- status,
- };
- }
- } catch (e) {
- logger_logger.withError(
- `Error processing file: ${filename} - ${e.message}`,
- integrationName,
- headSHA,
- "getChangedFiles"
- );
- }
- })
- .filter((i) => i !== undefined);
-
- changedFiles = changedFiles.filter((item, index) => {
- return (
- changedFiles.findIndex((obj) => obj.fileName === item.fileName) ===
- index
- );
- });
-
- logger_logger.withInfo(
- "Successfully fetched changed files",
- integrationName,
- headSHA,
- "getChangedFiles"
- );
-
- return changedFiles;
- } catch (error) {
- logger_logger.withError(
- `Error fetching changed files - ${error.message}`,
- integrationName,
- headSHA,
- "getChangedFiles"
- );
- throw error;
- }
- }
-
- async checkCommentExists({ octokit, context }) {
- logger_logger.withInfo(
- "Checking for existing comments...",
- integrationName,
- headSHA,
- "checkCommentExists"
- );
-
- if (IS_DEV) {
- logger_logger.withInfo(
- "Development mode enabled. Skipping comment check.",
- integrationName,
- headSHA,
- "checkCommentExists"
- );
- return null;
- }
-
- const { pull_request } = context.payload;
-
- try {
- const comments = await octokit.rest.issues.listComments({
- ...context.repo,
- issue_number: pull_request.number,
- });
-
- const existingComment = comments.data.find(
- (comment) =>
- comment.user.login === "github-actions[bot]" &&
- comment.body.includes(
- ""
- )
- );
- if (existingComment) {
- logger_logger.withInfo(
- "Found existing comment: " + existingComment?.id,
- integrationName,
- headSHA,
- "checkCommentExists"
- );
- } else {
- logger_logger.withInfo(
- "No existing comment found",
- integrationName,
- headSHA,
- "checkCommentExists"
- );
- }
-
- return existingComment;
- } catch (error) {
- logger_logger.withError(
- "Error checking for existing comments: " + error.message,
- integrationName,
- headSHA,
- "checkCommentExists"
- );
- throw error;
- }
- }
-
- async createIssueComment({
- octokit,
- context,
- content,
- comment_id = null,
- forceNewComment = false,
- }) {
- logger_logger.withInfo(
- "Creating an issue comment...",
- integrationName,
- headSHA,
- "createIssueComment"
- );
-
- const { pull_request } = context?.payload || {};
-
- content = `
-${content}`;
-
- const commentObj = {
- ...context.repo,
- issue_number: pull_request.number,
- body: content,
- };
-
- if (IS_DEV) {
- logger_logger.withInfo(
- "Development mode enabled. Skipping comment creation.",
- integrationName,
- headSHA,
- "createIssueComment"
- );
-
- return content;
- }
-
- if (comment_id && !forceNewComment)
- return octokit.rest.issues.updateComment({ ...commentObj, comment_id });
- return octokit.rest.issues.createComment(commentObj);
- }
-
- async deleteComment({ octokit, context, comment_id }) {
- logger_logger.withInfo(
- `Deleted comment with ID ${comment_id}`,
- integrationName,
- headSHA,
- "deleteComment"
- );
-
- const { pull_request } = context.payload;
-
- return octokit.rest.issues.deleteComment({
- ...context.repo,
- issue_number: pull_request.number,
- comment_id,
- });
- }
-
- async renderDownstreamAssetsComment({
- asset,
- downstreamAssets,
- classifications,
- }) {
- logger_logger.withInfo(
- "Rendering Downstream Assets...",
- integrationName,
- headSHA,
- "renderDownstreamAssetsComment"
- );
- try {
- let impactedData = downstreamAssets.entities.map(
- ({
- displayText,
- guid,
- typeName,
- attributes,
- meanings,
- classificationNames,
- }) => {
- // Modifying the typeName and getting the readableTypeName
- let readableTypeName = typeName
- .toLowerCase()
- .replace(attributes.connectorName, "")
- .toUpperCase();
-
- // Filtering classifications based on classificationNames
- let classificationsObj = classifications.filter(({ name }) =>
- classificationNames.includes(name)
- );
-
- // Modifying the readableTypeName
- readableTypeName =
- readableTypeName.charAt(0).toUpperCase() +
- readableTypeName.slice(1).toLowerCase();
-
- return [
- guid,
- truncate(displayText),
- truncate(attributes.connectorName),
- truncate(readableTypeName),
- truncate(
- attributes?.userDescription || attributes?.description || ""
- ),
- attributes?.certificateStatus || "",
- truncate(
- [...attributes?.ownerUsers, ...attributes?.ownerGroups] || []
- ),
- truncate(
- meanings.map(
- ({ displayText, termGuid }) =>
- `[${displayText}](${get_environment_variables_ATLAN_INSTANCE_URL}/assets/${termGuid}/overview?utm_source=dbt_github_action)`
- )
- ),
- truncate(
- classificationsObj?.map(
- ({ name, displayName }) => `\`${displayName}\``
- )
- ),
- attributes?.sourceURL || "",
- ];
- }
- );
-
- // Sorting the impactedData first by typeName and then by connectorName
- impactedData = impactedData.sort((a, b) => a[3].localeCompare(b[3]));
- impactedData = impactedData.sort((a, b) => a[2].localeCompare(b[2]));
-
- // Creating rows for the downstream table
- let rows = impactedData.map(
- ([
- guid,
- displayText,
- connectorName,
- typeName,
- description,
- certificateStatus,
- owners,
- meanings,
- classifications,
- sourceUrl,
- ]) => {
- // Getting connector and certification images
- const connectorImage = get_image_url_getConnectorImage(connectorName);
- const certificationImage = certificateStatus
- ? get_image_url_getCertificationImage(certificateStatus)
- : "";
-
- return [
- `${connectorImage} [${displayText}](${get_environment_variables_ATLAN_INSTANCE_URL}/assets/${guid}/overview?utm_source=dbt_github_action) ${certificationImage}`,
- `\`${typeName}\``,
- description,
- owners,
- meanings,
- classifications,
- sourceUrl ? `[Open in ${connectorName}](${sourceUrl})` : " ",
- ];
- }
- );
+}
- const assetInfo = getContractAssetInfo(
- get_environment_variables_ATLAN_INSTANCE_URL,
- asset
- );
+;// CONCATENATED MODULE: ./adapters/api/index.js
- // Generating the downstream table
- const downstreamTable = getDownstreamTable(
- get_environment_variables_ATLAN_INSTANCE_URL,
- downstreamAssets,
- rows,
- asset
- );
- // Generating the "View asset in Atlan" button
- const viewAssetButton = getViewAssetButton(get_environment_variables_ATLAN_INSTANCE_URL, asset);
- // Generating the final comment based on the presence of downstream assets
- if (downstreamAssets.entityCount > 0) {
- return `${assetInfo}
-${downstreamTable}
-${viewAssetButton}`;
- } else {
- return `${assetInfo}
-No downstream assets found.
-${viewAssetButton}`;
- }
- } catch (error) {
- logger_logger.withError(
- `Error rendering Downstream Assets: ${error.message}`,
- integrationName,
- headSHA,
- "renderDownstreamAssetsComment"
- );
- throw error;
- }
- }
+;// CONCATENATED MODULE: ./adapters/templates/github-integration.js
- readYamlFile(filePath) {
- try {
- // Read file content synchronously
- const data = external_fs_.readFileSync(filePath, 'utf8');
-
- // Parse the YAML data
- const parsedData = js_yaml.load(data);
-
- // Return parsed data
- return {
- contentString: data,
- contentYaml: parsedData
- };
- } catch (err) {
- return {
- error: err
- };
- }
- }
- parseDatasourceFromConfig(configYaml) {
- // Create a Map for keys starting with "data_source "
- const dataSourceMap = new Map();
+function getErrorResponseStatus401 (ATLAN_INSTANCE_URL, context) {
+  return `We couldn't connect to your Atlan Instance. Please make sure a valid Atlan Bearer Token is set as \`ATLAN_API_TOKEN\` in this repository's action secrets.
- // Iterate through the object to find relevant keys
- for (const [key, value] of Object.entries(configYaml)) {
- if (key.startsWith('data_source ')) {
- // Trim the prefix and add to the Map
- const trimmedKey = key.replace('data_source ', '');
- dataSourceMap.set(trimmedKey, value);
- }
- }
+Atlan Instance URL: ${ATLAN_INSTANCE_URL}
+
+Set your repository action secrets [here](https://github.com/${context.payload.repository.full_name}/settings/secrets/actions). For more information on how to setup the Atlan dbt Action, please read the [setup documentation here](https://github.com/atlanhq/dbt-action/blob/main/README.md).`
+}
- return dataSourceMap;
- }
+function getErrorResponseStatusUndefined(ATLAN_INSTANCE_URL, context) {
+  return `We couldn't connect to your Atlan Instance. Please make sure a valid Atlan Instance URL is set as \`ATLAN_INSTANCE_URL\` in this repository's action secrets.
- getQualifiedName(datasources, contractYaml) {
- if (contractYaml["data_source"] === undefined) {
- return;
- }
+Atlan Instance URL: ${ATLAN_INSTANCE_URL}
+
+Make sure your Atlan Instance URL is set in the following format.
+\`https://tenant.atlan.com\`
+
+Set your repository action secrets [here](https://github.com/${context.payload.repository.full_name}/settings/secrets/actions). For more information on how to setup the Atlan dbt Action, please read the [setup documentation here](https://github.com/atlanhq/dbt-action/blob/main/README.md).`
+}
- if (!datasources.has(contractYaml.data_source)) {
- return;
- }
+function getSetResourceOnAssetComment(tableMd, setResourceFailed) {
+ return `## 🎊 Congrats on the merge!
+
+ This pull request has been added as a resource to the following assets:
+
+ ${setResourceFailed ? '> ⚠️ Seems like we were unable to set the resources for some of the assets due to insufficient permissions. To ensure that the pull request is linked as a resource, you will need to assign the right persona with requisite permissions to the API token.' : ''}
+
+ Name | Resource set successfully
+ --- | ---
+ ${tableMd}
+ `
+}
+
+function getAssetInfo(ATLAN_INSTANCE_URL, asset, materialisedAsset, environmentName, projectName) {
+ return `### ${getConnectorImage(
+ asset.attributes.connectorName
+ )} [${asset.displayText}](${ATLAN_INSTANCE_URL}/assets/${
+ asset.guid
+ }/overview?utm_source=dbt_github_action) ${
+ asset.attributes?.certificateStatus
+ ? getCertificationImage(asset.attributes.certificateStatus)
+ : ""
+ }
+ Materialised asset: ${getConnectorImage(
+ materialisedAsset.attributes.connectorName
+ )} [${materialisedAsset.attributes.name}](${ATLAN_INSTANCE_URL}/assets/${
+ materialisedAsset.guid
+ }/overview?utm_source=dbt_github_action) ${
+ materialisedAsset.attributes?.certificateStatus
+ ? getCertificationImage(materialisedAsset.attributes.certificateStatus)
+ : ""
+ }${environmentName ? ` | Environment Name: \`${environmentName}\`` : ""}${
+ projectName ? ` | Project Name: \`${projectName}\`` : ""
+ }`
+}
- let datasource = datasources.get(contractYaml.data_source)
- const qualifiedName = datasource?.connection?.qualified_name || '';
- const database = datasource?.database || '';
- const schema = datasource?.schema || '';
- // Format the output
- const assetQualifiedName = `${qualifiedName}/${database}/${schema}/${contractYaml.dataset}`;
- return assetQualifiedName;
+function getDownstreamTable(ATLAN_INSTANCE_URL, downstreamAssets, rows, materialisedAsset) {
+ return `${
+ downstreamAssets.entityCount
+ } downstream assets 👇
+
+ Name | Type | Description | Owners | Terms | Classifications | Source URL
+ --- | --- | --- | --- | --- | --- | ---
+ ${rows
+ .map((row) =>
+ row.map((i) => i.replace(/\|/g, "•").replace(/\n/g, "")).join(" | ")
+ )
+ .join("\n")}
+
+ ${
+ downstreamAssets.hasMore
+ ? `[See more downstream assets at Atlan](${ATLAN_INSTANCE_URL}/assets/${materialisedAsset.guid}/lineage?utm_source=dbt_github_action)`
+ : ""
}
+
+ `
+}
+
+function getViewAssetButton(ATLAN_INSTANCE_URL, asset) {
+ return `${getImageURL(
+ "atlan-logo",
+ 15,
+ 15
+ )} [View asset in Atlan](${ATLAN_INSTANCE_URL}/assets/${
+ asset.guid
+ }/overview?utm_source=dbt_github_action)`
+}
+
+function getMDCommentForModel(ATLAN_INSTANCE_URL, model) {
+ return `${getConnectorImage(model?.attributes?.connectorName)} [${
+ model?.displayText
+ }](${ATLAN_INSTANCE_URL}/assets/${model?.guid}/overview?utm_source=dbt_github_action)`
+}
+
+function getMDCommentForMaterialisedView(ATLAN_INSTANCE_URL, materialisedView) {
+ return `${getConnectorImage(materialisedView?.attributes?.connectorName)} [${
+ materialisedView?.attributes?.name
+ }](${ATLAN_INSTANCE_URL}/assets/${materialisedView?.guid}/overview?utm_source=dbt_github_action)`
}
+function getTableMD(md, resp) {
+ return `${md} | ${resp ? '✅' : '❌'} \n`
+}
;// CONCATENATED MODULE: ./adapters/integrations/github-integration.js
// githubIntegration.js
@@ -30628,8 +25530,8 @@ ${viewAssetButton}`;
-var github_integration_headSHA;
-const github_integration_integrationName = "github";
+var headSHA;
+const integrationName = "github";
class GitHubIntegration extends IntegrationInterface {
constructor(token) {
super(token);
@@ -30643,20 +25545,20 @@ class GitHubIntegration extends IntegrationInterface {
const octokit = github.getOctokit(this.token);
const { pull_request } = context?.payload;
const { state, merged } = pull_request;
- github_integration_headSHA = pull_request?.head?.sha;
+ headSHA = pull_request?.head?.sha;
logger_logger.withInfo(
"GitHub Integration is running...",
- github_integration_integrationName,
- github_integration_headSHA,
+ integrationName,
+ headSHA,
"run"
);
if (!(await this.authIntegration({ octokit, context }))) {
logger_logger.withError(
"Authentication failed. Wrong API Token.",
- github_integration_integrationName,
- github_integration_headSHA,
+ integrationName,
+ headSHA,
"run"
);
throw { message: "Wrong API Token" };
@@ -30682,15 +25584,15 @@ class GitHubIntegration extends IntegrationInterface {
logger_logger.withInfo(
"Successfully Completed DBT_CI_ACTION",
- github_integration_integrationName,
- github_integration_headSHA,
+ integrationName,
+ headSHA,
"run"
);
} catch (error) {
logger_logger.withError(
`Error in run(): ${error.message}`,
- github_integration_integrationName,
- github_integration_headSHA,
+ integrationName,
+ headSHA,
"run"
);
throw error;
@@ -30700,8 +25602,8 @@ class GitHubIntegration extends IntegrationInterface {
async printDownstreamAssets({ octokit, context }) {
logger_logger.withInfo(
"Printing downstream assets...",
- github_integration_integrationName,
- github_integration_headSHA,
+ integrationName,
+ headSHA,
"printDownstreamAssets"
);
@@ -30713,8 +25615,8 @@ class GitHubIntegration extends IntegrationInterface {
for (const { fileName, filePath, status } of changedFiles) {
logger_logger.withInfo(
`Processing file: ${fileName}`,
- github_integration_integrationName,
- github_integration_headSHA,
+ integrationName,
+ headSHA,
"printDownstreamAssets"
);
const aliasName = await this.getAssetName({
@@ -30736,8 +25638,8 @@ class GitHubIntegration extends IntegrationInterface {
logger_logger.withInfo(
`Processing asset: ${assetName} in environment: ${environment}`,
- github_integration_integrationName,
- github_integration_headSHA,
+ integrationName,
+ headSHA,
"printDownstreamAssets"
);
const asset = await getAsset({
@@ -30752,8 +25654,8 @@ class GitHubIntegration extends IntegrationInterface {
if (status === "added") {
logger_logger.withInfo(
`New model added: ${fileName}`,
- github_integration_integrationName,
- github_integration_headSHA,
+ integrationName,
+ headSHA,
"printDownstreamAssets"
);
comments += getNewModelAddedComment(fileName);
@@ -30764,8 +25666,8 @@ class GitHubIntegration extends IntegrationInterface {
if (asset.error) {
logger_logger.withError(
`Asset error for ${assetName}: ${asset.error}`,
- github_integration_integrationName,
- github_integration_headSHA,
+ integrationName,
+ headSHA,
"printDownstreamAssets"
);
comments += asset.error;
@@ -30791,8 +25693,8 @@ class GitHubIntegration extends IntegrationInterface {
if (downstreamAssets.error) {
logger_logger.withError(
`Downstream assets error for ${assetName}: ${downstreamAssets.error}`,
- github_integration_integrationName,
- github_integration_headSHA,
+ integrationName,
+ headSHA,
"printDownstreamAssets"
);
comments += downstreamAssets.error;
@@ -30837,8 +25739,8 @@ class GitHubIntegration extends IntegrationInterface {
logger_logger.withInfo(
`Existing Comment: ${existingComment?.id}`,
- github_integration_integrationName,
- github_integration_headSHA,
+ integrationName,
+ headSHA,
"printDownstreamAssets"
);
@@ -30859,8 +25761,8 @@ class GitHubIntegration extends IntegrationInterface {
logger_logger.withInfo(
"Successfully printed Downstream Assets",
- github_integration_integrationName,
- github_integration_headSHA,
+ integrationName,
+ headSHA,
"printDownstreamAssets"
);
@@ -30868,8 +25770,8 @@ class GitHubIntegration extends IntegrationInterface {
} catch (error) {
logger_logger.withError(
`Error in printDownstreamAssets: ${error.message}`,
- github_integration_integrationName,
- github_integration_headSHA,
+ integrationName,
+ headSHA,
"printDownstreamAssets"
);
throw error;
@@ -30879,8 +25781,8 @@ class GitHubIntegration extends IntegrationInterface {
async setResourceOnAsset({ octokit, context }) {
logger_logger.withInfo(
"Setting resources on assets...",
- github_integration_integrationName,
- github_integration_headSHA,
+ integrationName,
+ headSHA,
"setResourceOnAsset"
);
@@ -30894,8 +25796,8 @@ class GitHubIntegration extends IntegrationInterface {
if (changedFiles.length === 0) {
logger_logger.withInfo(
"No changed files found. Skipping resource setup.",
- github_integration_integrationName,
- github_integration_headSHA,
+ integrationName,
+ headSHA,
"setResourceOnAsset"
);
return totalChangedFiles;
@@ -30904,8 +25806,8 @@ class GitHubIntegration extends IntegrationInterface {
for (const { fileName, filePath } of changedFiles) {
logger_logger.withInfo(
`Processing file: ${fileName}`,
- github_integration_integrationName,
- github_integration_headSHA,
+ integrationName,
+ headSHA,
"setResourceOnAsset"
);
const aliasName = await this.getAssetName({
@@ -30919,8 +25821,8 @@ class GitHubIntegration extends IntegrationInterface {
logger_logger.withInfo(
`Resolved asset name: ${assetName}`,
- github_integration_integrationName,
- github_integration_headSHA,
+ integrationName,
+ headSHA,
"setResourceOnAsset"
);
@@ -30935,8 +25837,8 @@ class GitHubIntegration extends IntegrationInterface {
logger_logger.withInfo(
`Processing asset: ${assetName} in environment: ${environment}`,
- github_integration_integrationName,
- github_integration_headSHA,
+ integrationName,
+ headSHA,
"setResourceOnAsset"
);
@@ -30950,8 +25852,8 @@ class GitHubIntegration extends IntegrationInterface {
if (asset.error) {
logger_logger.withError(
`Failed to retrieve asset: ${assetName}, Error: ${asset.error}`,
- github_integration_integrationName,
- github_integration_headSHA,
+ integrationName,
+ headSHA,
"setResourceOnAsset"
);
continue;
@@ -30975,8 +25877,8 @@ class GitHubIntegration extends IntegrationInterface {
if (downstreamAssets.error) {
logger_logger.withError(
`Failed to retrieve downstream assets for: ${assetName}, Error: ${downstreamAssets.error}`,
- github_integration_integrationName,
- github_integration_headSHA,
+ integrationName,
+ headSHA,
"setResourceOnAsset"
);
continue;
@@ -31007,15 +25909,15 @@ class GitHubIntegration extends IntegrationInterface {
this.sendSegmentEventOfIntegration
);
- const md = getMDCommentForModel(get_environment_variables_ATLAN_INSTANCE_URL, model);
+ const md = getMDCommentForModel(ATLAN_INSTANCE_URL, model);
tableMd += getTableMD(md, resp);
if (!resp) {
setResourceFailed = true;
logger_logger.withError(
`Setting resource failed for model: ${modelGuid}`,
- github_integration_integrationName,
- github_integration_headSHA,
+ integrationName,
+ headSHA,
"setResourceOnAsset"
);
}
@@ -31031,7 +25933,7 @@ class GitHubIntegration extends IntegrationInterface {
);
const md = getMDCommentForMaterialisedView(
- get_environment_variables_ATLAN_INSTANCE_URL,
+ ATLAN_INSTANCE_URL,
materialisedView
);
@@ -31040,8 +25942,8 @@ class GitHubIntegration extends IntegrationInterface {
setResourceFailed = true;
logger_logger.withError(
`Setting resource failed for materialized view: ${tableAssetGuid}`,
- github_integration_integrationName,
- github_integration_headSHA,
+ integrationName,
+ headSHA,
"setResourceOnAsset"
);
}
@@ -31060,8 +25962,8 @@ class GitHubIntegration extends IntegrationInterface {
logger_logger.withInfo(
"Successfully set the resource on the asset",
- github_integration_integrationName,
- github_integration_headSHA,
+ integrationName,
+ headSHA,
"setResourceOnAsset"
);
@@ -31069,8 +25971,8 @@ class GitHubIntegration extends IntegrationInterface {
} catch (error) {
logger_logger.withError(
`Error in setResourceOnAsset: ${error}`,
- github_integration_integrationName,
- github_integration_headSHA,
+ integrationName,
+ headSHA,
"setResourceOnAsset"
);
throw error;
@@ -31080,8 +25982,8 @@ class GitHubIntegration extends IntegrationInterface {
async authIntegration({ octokit, context }) {
logger_logger.withInfo(
"Authenticating with Atlan",
- github_integration_integrationName,
- github_integration_headSHA,
+ integrationName,
+ headSHA,
"authIntegration"
);
@@ -31095,22 +25997,22 @@ class GitHubIntegration extends IntegrationInterface {
logger_logger.withInfo(
`Existing Comment: ${existingComment?.id}`,
- github_integration_integrationName,
- github_integration_headSHA,
+ integrationName,
+ headSHA,
"authIntegration"
);
if (response?.status === 401) {
logger_logger.withError(
"Authentication failed: Status 401",
- github_integration_integrationName,
- github_integration_headSHA,
+ integrationName,
+ headSHA,
"authIntegration"
);
await this.createIssueComment({
octokit,
context,
- content: getErrorResponseStatus401(get_environment_variables_ATLAN_INSTANCE_URL, context),
+ content: getErrorResponseStatus401(ATLAN_INSTANCE_URL, context),
comment_id: existingComment?.id,
});
return false;
@@ -31119,30 +26021,30 @@ class GitHubIntegration extends IntegrationInterface {
if (response === undefined) {
logger_logger.withError(
"Authentication failed: Undefined response",
- github_integration_integrationName,
- github_integration_headSHA,
+ integrationName,
+ headSHA,
"authIntegration"
);
await this.createIssueComment({
octokit,
context,
- content: getErrorResponseStatusUndefined(get_environment_variables_ATLAN_INSTANCE_URL, context),
+ content: getErrorResponseStatusUndefined(ATLAN_INSTANCE_URL, context),
comment_id: existingComment?.id,
});
return false;
}
logger_logger.withInfo(
"Successfully Authenticated with Atlan",
- github_integration_integrationName,
- github_integration_headSHA,
+ integrationName,
+ headSHA,
"authIntegration"
);
return true;
} catch (error) {
logger_logger.withError(
`Error in authIntegration: ${error.message}`,
- github_integration_integrationName,
- github_integration_headSHA,
+ integrationName,
+ headSHA,
"authIntegration"
);
throw error;
@@ -31151,16 +26053,16 @@ class GitHubIntegration extends IntegrationInterface {
async sendSegmentEventOfIntegration({ action, properties }) {
try {
- const domain = new URL(get_environment_variables_ATLAN_INSTANCE_URL).hostname;
+ const domain = new URL(ATLAN_INSTANCE_URL).hostname;
const { context } = github; //confirm this
logger_logger.withInfo(
`Sending Segment event for action: ${action}`,
- github_integration_integrationName,
- github_integration_headSHA,
+ integrationName,
+ headSHA,
"sendSegmentEventOfIntegration"
);
- const raw = json_stringify_safe_stringify({
+ const raw = stringify({
category: "integration",
object: "github",
action,
@@ -31176,8 +26078,8 @@ class GitHubIntegration extends IntegrationInterface {
} catch (error) {
logger_logger.withError(
`Error sending Segment event for action: ${action} - ${error.message}`,
- github_integration_integrationName,
- github_integration_headSHA,
+ integrationName,
+ headSHA,
"sendSegmentEventOfIntegration"
);
throw error;
@@ -31188,8 +26090,8 @@ class GitHubIntegration extends IntegrationInterface {
try {
logger_logger.withInfo(
"Fetching changed files...",
- github_integration_integrationName,
- github_integration_headSHA,
+ integrationName,
+ headSHA,
"getChangedFiles"
);
@@ -31226,8 +26128,8 @@ class GitHubIntegration extends IntegrationInterface {
} catch (e) {
logger_logger.withError(
`Error processing file: ${filename} - ${e.message}`,
- github_integration_integrationName,
- github_integration_headSHA,
+ integrationName,
+ headSHA,
"getChangedFiles"
);
}
@@ -31243,8 +26145,8 @@ class GitHubIntegration extends IntegrationInterface {
logger_logger.withInfo(
"Successfully fetched changed files",
- github_integration_integrationName,
- github_integration_headSHA,
+ integrationName,
+ headSHA,
"getChangedFiles"
);
@@ -31252,8 +26154,8 @@ class GitHubIntegration extends IntegrationInterface {
} catch (error) {
logger_logger.withError(
`Error fetching changed files - ${error.message}`,
- github_integration_integrationName,
- github_integration_headSHA,
+ integrationName,
+ headSHA,
"getChangedFiles"
);
throw error;
@@ -31264,8 +26166,8 @@ class GitHubIntegration extends IntegrationInterface {
try {
logger_logger.withInfo(
"Getting asset name...",
- github_integration_integrationName,
- github_integration_headSHA,
+ integrationName,
+ headSHA,
"getAssetName"
);
@@ -31282,8 +26184,8 @@ class GitHubIntegration extends IntegrationInterface {
if (matches) {
logger_logger.withInfo(
`Found a match: ${matches[1].trim()}`,
- github_integration_integrationName,
- github_integration_headSHA,
+ integrationName,
+ headSHA,
"getAssetName"
);
return matches[1].trim();
@@ -31291,16 +26193,16 @@ class GitHubIntegration extends IntegrationInterface {
}
logger_logger.withInfo(
`Using filename as asset name: ${fileName}`,
- github_integration_integrationName,
- github_integration_headSHA,
+ integrationName,
+ headSHA,
"getAssetName"
);
return fileName;
} catch (error) {
logger_logger.withError(
`Error getting asset name - ${error.message}`,
- github_integration_integrationName,
- github_integration_headSHA,
+ integrationName,
+ headSHA,
"getAssetName"
);
throw error;
@@ -31311,8 +26213,8 @@ class GitHubIntegration extends IntegrationInterface {
try {
logger_logger.withInfo(
"Fetching file contents...",
- github_integration_integrationName,
- github_integration_headSHA,
+ integrationName,
+ headSHA,
"getFileContents"
);
@@ -31333,8 +26235,8 @@ class GitHubIntegration extends IntegrationInterface {
.catch((e) => {
logger_logger.withError(
`Error fetching file contents: ${e.message}`,
- github_integration_integrationName,
- github_integration_headSHA,
+ integrationName,
+ headSHA,
"getFileContents"
);
return null;
@@ -31346,8 +26248,8 @@ class GitHubIntegration extends IntegrationInterface {
logger_logger.withInfo(
"Successfully fetched file contents",
- github_integration_integrationName,
- github_integration_headSHA,
+ integrationName,
+ headSHA,
"getFileContents"
);
@@ -31355,8 +26257,8 @@ class GitHubIntegration extends IntegrationInterface {
} catch (error) {
logger_logger.withError(
`Error in getFileContents: ${error.message}`,
- github_integration_integrationName,
- github_integration_headSHA,
+ integrationName,
+ headSHA,
"getFileContents"
);
throw error;
@@ -31366,16 +26268,16 @@ class GitHubIntegration extends IntegrationInterface {
async checkCommentExists({ octokit, context }) {
logger_logger.withInfo(
"Checking for existing comments...",
- github_integration_integrationName,
- github_integration_headSHA,
+ integrationName,
+ headSHA,
"checkCommentExists"
);
if (IS_DEV) {
logger_logger.withInfo(
"Development mode enabled. Skipping comment check.",
- github_integration_integrationName,
- github_integration_headSHA,
+ integrationName,
+ headSHA,
"checkCommentExists"
);
return null;
@@ -31399,15 +26301,15 @@ class GitHubIntegration extends IntegrationInterface {
if (existingComment) {
logger_logger.withInfo(
"Found existing comment: " + existingComment?.id,
- github_integration_integrationName,
- github_integration_headSHA,
+ integrationName,
+ headSHA,
"checkCommentExists"
);
} else {
logger_logger.withInfo(
"No existing comment found",
- github_integration_integrationName,
- github_integration_headSHA,
+ integrationName,
+ headSHA,
"checkCommentExists"
);
}
@@ -31416,8 +26318,8 @@ class GitHubIntegration extends IntegrationInterface {
} catch (error) {
logger_logger.withError(
"Error checking for existing comments: " + error.message,
- github_integration_integrationName,
- github_integration_headSHA,
+ integrationName,
+ headSHA,
"checkCommentExists"
);
throw error;
@@ -31433,8 +26335,8 @@ class GitHubIntegration extends IntegrationInterface {
}) {
logger_logger.withInfo(
"Creating an issue comment...",
- github_integration_integrationName,
- github_integration_headSHA,
+ integrationName,
+ headSHA,
"createIssueComment"
);
@@ -31452,8 +26354,8 @@ ${content}`;
if (IS_DEV) {
logger_logger.withInfo(
"Development mode enabled. Skipping comment creation.",
- github_integration_integrationName,
- github_integration_headSHA,
+ integrationName,
+ headSHA,
"createIssueComment"
);
@@ -31468,8 +26370,8 @@ ${content}`;
async deleteComment({ octokit, context, comment_id }) {
logger_logger.withInfo(
`Deleted comment with ID ${comment_id}`,
- github_integration_integrationName,
- github_integration_headSHA,
+ integrationName,
+ headSHA,
"deleteComment"
);
@@ -31492,8 +26394,8 @@ ${content}`;
}) {
logger_logger.withInfo(
"Rendering Downstream Assets...",
- github_integration_integrationName,
- github_integration_headSHA,
+ integrationName,
+ headSHA,
"renderDownstreamAssetsComment"
);
try {
@@ -31537,7 +26439,7 @@ ${content}`;
truncate(
meanings.map(
({ displayText, termGuid }) =>
- `[${displayText}](${get_environment_variables_ATLAN_INSTANCE_URL}/assets/${termGuid}/overview?utm_source=dbt_github_action)`
+ `[${displayText}](${ATLAN_INSTANCE_URL}/assets/${termGuid}/overview?utm_source=dbt_github_action)`
)
),
truncate(
@@ -31569,13 +26471,13 @@ ${content}`;
sourceUrl,
]) => {
// Getting connector and certification images
- const connectorImage = get_image_url_getConnectorImage(connectorName);
+ const connectorImage = getConnectorImage(connectorName);
const certificationImage = certificateStatus
- ? get_image_url_getCertificationImage(certificateStatus)
+ ? getCertificationImage(certificateStatus)
: "";
return [
- `${connectorImage} [${displayText}](${get_environment_variables_ATLAN_INSTANCE_URL}/assets/${guid}/overview?utm_source=dbt_github_action) ${certificationImage}`,
+ `${connectorImage} [${displayText}](${ATLAN_INSTANCE_URL}/assets/${guid}/overview?utm_source=dbt_github_action) ${certificationImage}`,
`\`${typeName}\``,
description,
owners,
@@ -31592,7 +26494,7 @@ ${content}`;
// Generating asset information
const assetInfo = getAssetInfo(
- get_environment_variables_ATLAN_INSTANCE_URL,
+ ATLAN_INSTANCE_URL,
asset,
materialisedAsset,
environmentName,
@@ -31601,14 +26503,14 @@ ${content}`;
// Generating the downstream table
const downstreamTable = getDownstreamTable(
- get_environment_variables_ATLAN_INSTANCE_URL,
+ ATLAN_INSTANCE_URL,
downstreamAssets,
rows,
materialisedAsset
);
// Generating the "View asset in Atlan" button
- const viewAssetButton = getViewAssetButton(get_environment_variables_ATLAN_INSTANCE_URL, asset);
+ const viewAssetButton = getViewAssetButton(ATLAN_INSTANCE_URL, asset);
// Generating the final comment based on the presence of downstream assets
if (downstreamAssets.entities.length > 0) {
@@ -31627,8 +26529,8 @@ ${viewAssetButton}`;
} catch (error) {
logger_logger.withError(
`Error rendering Downstream Assets: ${error.message}`,
- github_integration_integrationName,
- github_integration_headSHA,
+ integrationName,
+ headSHA,
"renderDownstreamAssetsComment"
);
throw error;
@@ -39084,22 +33986,22 @@ function gitlab_integration_getSetResourceOnAssetComment(tableMd, setResourceFai
}
function gitlab_integration_getAssetInfo(ATLAN_INSTANCE_URL, asset, materialisedAsset, environmentName, projectName) {
- return `### ${get_image_url_getConnectorImage(
+ return `### ${getConnectorImage(
asset.attributes.connectorName
)} [${asset.displayText}](${ATLAN_INSTANCE_URL}/assets/${
asset.guid
}/overview?utm_source=dbt_gitlab_action) ${
asset.attributes?.certificateStatus
- ? get_image_url_getCertificationImage(asset.attributes.certificateStatus)
+ ? getCertificationImage(asset.attributes.certificateStatus)
: ""
}
-Materialised asset: ${get_image_url_getConnectorImage(
+Materialised asset: ${getConnectorImage(
materialisedAsset.attributes.connectorName
)} [${materialisedAsset.attributes.name}](${ATLAN_INSTANCE_URL}/assets/${
materialisedAsset.guid
}/overview?utm_source=dbt_gitlab_action) ${
materialisedAsset.attributes?.certificateStatus
- ? get_image_url_getCertificationImage(materialisedAsset.attributes.certificateStatus)
+ ? getCertificationImage(materialisedAsset.attributes.certificateStatus)
: ""
}${environmentName ? ` | Environment Name: \`${environmentName}\`` : ""}${
projectName ? ` | Project Name: \`${projectName}\`` : ""
@@ -39129,7 +34031,7 @@ ${
}
function gitlab_integration_getViewAssetButton(ATLAN_INSTANCE_URL, asset) {
- return `${get_image_url_getImageURL(
+ return `${getImageURL(
"atlan-logo",
15,
15
@@ -39139,13 +34041,13 @@ function gitlab_integration_getViewAssetButton(ATLAN_INSTANCE_URL, asset) {
}
function gitlab_integration_getMDCommentForModel(ATLAN_INSTANCE_URL, model) {
- return `${get_image_url_getConnectorImage(model?.attributes?.connectorName)} [${
+ return `${getConnectorImage(model?.attributes?.connectorName)} [${
model?.displayText
}](${ATLAN_INSTANCE_URL}/assets/${model?.guid}/overview?utm_source=dbt_gitlab_action)`
}
function gitlab_integration_getMDCommentForMaterialisedView(ATLAN_INSTANCE_URL, materialisedView) {
- return `${get_image_url_getConnectorImage(materialisedView?.attributes?.connectorName)} [${
+ return `${getConnectorImage(materialisedView?.attributes?.connectorName)} [${
materialisedView?.attributes?.name
}](${ATLAN_INSTANCE_URL}/assets/${materialisedView?.guid}/overview?utm_source=dbt_gitlab_action)`
}
@@ -39565,7 +34467,7 @@ class GitLabIntegration extends IntegrationInterface {
web_url,
this.sendSegmentEventOfIntegration
);
- const md = gitlab_integration_getMDCommentForModel(get_environment_variables_ATLAN_INSTANCE_URL, model);
+ const md = gitlab_integration_getMDCommentForModel(ATLAN_INSTANCE_URL, model);
tableMd += gitlab_integration_getTableMD(md, resp);
if (!resp) {
setResourceFailed = true;
@@ -39587,7 +34489,7 @@ class GitLabIntegration extends IntegrationInterface {
this.sendSegmentEventOfIntegration
);
const md = gitlab_integration_getMDCommentForMaterialisedView(
- get_environment_variables_ATLAN_INSTANCE_URL,
+ ATLAN_INSTANCE_URL,
materialisedView
);
tableMd += gitlab_integration_getTableMD(md, resp);
@@ -39662,7 +34564,7 @@ class GitLabIntegration extends IntegrationInterface {
await this.createIssueComment({
gitlab,
content: gitlab_integration_getErrorResponseStatus401(
- get_environment_variables_ATLAN_INSTANCE_URL,
+ ATLAN_INSTANCE_URL,
CI_PROJECT_NAME,
CI_PROJECT_NAMESPACE
),
@@ -39681,7 +34583,7 @@ class GitLabIntegration extends IntegrationInterface {
await this.createIssueComment({
gitlab,
content: gitlab_integration_getErrorResponseStatusUndefined(
- get_environment_variables_ATLAN_INSTANCE_URL,
+ ATLAN_INSTANCE_URL,
CI_PROJECT_NAME,
CI_PROJECT_NAMESPACE
),
@@ -39752,7 +34654,7 @@ ${content}`;
async sendSegmentEventOfIntegration({ action, properties }) {
try {
- const domain = new URL(get_environment_variables_ATLAN_INSTANCE_URL).hostname;
+ const domain = new URL(ATLAN_INSTANCE_URL).hostname;
logger_logger.withInfo(
`Sending Segment event for action: ${action}`,
gitlab_integration_integrationName,
@@ -39760,7 +34662,7 @@ ${content}`;
"sendSegmentEventOfIntegration"
);
- const raw = json_stringify_safe_stringify({
+ const raw = stringify({
category: "integration",
object: "gitlab",
action,
@@ -40141,7 +35043,7 @@ ${content}`;
truncate(
meanings.map(
({ displayText, termGuid }) =>
- `[${displayText}](${get_environment_variables_ATLAN_INSTANCE_URL}/assets/${termGuid}/overview?utm_source=dbt_gitlab_action)`
+ `[${displayText}](${ATLAN_INSTANCE_URL}/assets/${termGuid}/overview?utm_source=dbt_gitlab_action)`
)
),
truncate(
@@ -40173,13 +35075,13 @@ ${content}`;
sourceUrl,
]) => {
// Getting connector and certification images
- const connectorImage = get_image_url_getConnectorImage(connectorName);
+ const connectorImage = getConnectorImage(connectorName);
const certificationImage = certificateStatus
- ? get_image_url_getCertificationImage(certificateStatus)
+ ? getCertificationImage(certificateStatus)
: "";
return [
- `${connectorImage} [${displayText}](${get_environment_variables_ATLAN_INSTANCE_URL}/assets/${guid}/overview?utm_source=dbt_gitlab_action) ${certificationImage}`,
+ `${connectorImage} [${displayText}](${ATLAN_INSTANCE_URL}/assets/${guid}/overview?utm_source=dbt_gitlab_action) ${certificationImage}`,
`\`${typeName}\``,
description,
owners,
@@ -40195,7 +35097,7 @@ ${content}`;
const projectName = materialisedAsset?.attributes?.assetDbtProjectName;
// Generating asset information
const assetInfo = gitlab_integration_getAssetInfo(
- get_environment_variables_ATLAN_INSTANCE_URL,
+ ATLAN_INSTANCE_URL,
asset,
materialisedAsset,
environmentName,
@@ -40204,14 +35106,14 @@ ${content}`;
// Generating the downstream table
const downstreamTable = gitlab_integration_getDownstreamTable(
- get_environment_variables_ATLAN_INSTANCE_URL,
+ ATLAN_INSTANCE_URL,
downstreamAssets,
rows,
materialisedAsset
);
// Generating the "View asset in Atlan" button
- const viewAssetButton = gitlab_integration_getViewAssetButton(get_environment_variables_ATLAN_INSTANCE_URL, asset);
+ const viewAssetButton = gitlab_integration_getViewAssetButton(ATLAN_INSTANCE_URL, asset);
// Generating the final comment based on the presence of downstream assets
if (downstreamAssets.entities.length > 0) {
@@ -40239,30 +35141,15 @@ ${viewAssetButton}`;
}
}
-;// CONCATENATED MODULE: ./adapters/gateway.js
-// Common Gateway for all integrations
-
-async function runAction(token, integrationModule) {
- if (token === undefined) {
- logger_logger.logInfo("Token not provided.", "runAction");
- return;
- }
- const integration = new integrationModule(token);
- await integration.run();
-}
-
;// CONCATENATED MODULE: ./adapters/index.js
+// main.js
-// main.js
-
-
async function run() {
//Add new integrations over here
- await runAction(GITHUB_TOKEN, ContractIntegration);
await runAction(GITHUB_TOKEN, GitHubIntegration);
await runAction(GITLAB_TOKEN, GitLabIntegration);
}
diff --git a/package-lock.json b/package-lock.json
index 0803521..5d68496 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -17,7 +17,6 @@
"@gitbeaker/rest": "^39.19.0",
"@vercel/ncc": "^0.34.0",
"dotenv": "^16.0.3",
- "js-yaml": "^4.1.0",
"json-stringify-safe": "^5.0.1",
"node-fetch": "^3.3.0",
"uuid": "^9.0.0"
@@ -268,12 +267,6 @@
"url": "https://github.com/chalk/ansi-styles?sponsor=1"
}
},
- "node_modules/argparse": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz",
- "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==",
- "license": "Python-2.0"
- },
"node_modules/async-sema": {
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/async-sema/-/async-sema-3.1.1.tgz",
@@ -476,18 +469,6 @@
"node": ">=0.10.0"
}
},
- "node_modules/js-yaml": {
- "version": "4.1.0",
- "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz",
- "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==",
- "license": "MIT",
- "dependencies": {
- "argparse": "^2.0.1"
- },
- "bin": {
- "js-yaml": "bin/js-yaml.js"
- }
- },
"node_modules/json-stringify-safe": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz",
diff --git a/package.json b/package.json
index 52ddad8..cf55044 100644
--- a/package.json
+++ b/package.json
@@ -35,7 +35,6 @@
"@gitbeaker/rest": "^39.19.0",
"@vercel/ncc": "^0.34.0",
"dotenv": "^16.0.3",
- "js-yaml": "^4.1.0",
"json-stringify-safe": "^5.0.1",
"node-fetch": "^3.3.0",
"uuid": "^9.0.0"