From 51d6efbefa7829fdb6564f21409e40d73be366a5 Mon Sep 17 00:00:00 2001 From: Pascal Desmarets Date: Thu, 29 Mar 2018 12:53:16 +0200 Subject: [PATCH] Forward- and reverse-engineering --- forward_engineering/.gitignore | 1 + forward_engineering/api.js | 127 ++++ forward_engineering/config.json | 6 + forward_engineering/package.json | 8 + helper/helper.js | 37 + localization/en.json | 15 +- package.json | 15 +- .../field_level/fieldLevelConfig.json | 170 +++-- .../model_level/modelLevelConfig.json | 4 +- reverse_engineering/SchemaCreator.js | 357 +++++++++ reverse_engineering/api.js | 696 +++++++++--------- reverse_engineering/config.json | 1 + .../connectionSettingsModalConfig.json | 114 ++- reverse_engineering/package.json | 4 +- types/array.json | 3 +- types/date.json | 1 + types/nested.json | 2 +- types/object.json | 1 + types/string.json | 3 +- 19 files changed, 1141 insertions(+), 424 deletions(-) create mode 100644 forward_engineering/.gitignore create mode 100644 forward_engineering/api.js create mode 100644 forward_engineering/config.json create mode 100644 forward_engineering/package.json create mode 100644 helper/helper.js create mode 100644 reverse_engineering/SchemaCreator.js diff --git a/forward_engineering/.gitignore b/forward_engineering/.gitignore new file mode 100644 index 0000000..b512c09 --- /dev/null +++ b/forward_engineering/.gitignore @@ -0,0 +1 @@ +node_modules \ No newline at end of file diff --git a/forward_engineering/api.js b/forward_engineering/api.js new file mode 100644 index 0000000..55ad43c --- /dev/null +++ b/forward_engineering/api.js @@ -0,0 +1,127 @@ +const helper = require('../helper/helper.js'); + +module.exports = { + generateScript(data, logger, cb) { + const { jsonSchema, modelData, containerData, entityData, isUpdateScript } = data; + let result = ""; + let mappingScript = { + mappings: { + [entityData.collectionName.toLowerCase()]: { + properties: this.getMappingScript(JSON.parse(jsonSchema)) + } + } + }; + + if (isUpdateScript) { + result = this.getCurlScript(mappingScript, modelData, containerData); + } else { + result += this.getKibanaScript(mappingScript, containerData); + } + + cb(null, result); + }, + + getCurlScript(mapping, modelData, indexData) { + const host = modelData.host || 'localhost'; + const port = modelData.port || 9200; + const indexName = indexData.name || ""; + + return `curl -XPUT '${host}:${port}/${indexName.toLowerCase()}?pretty' -H 'Content-Type: application/json' -d '\n${JSON.stringify(mapping, null, 4)}\n'`; + }, + + getKibanaScript(mapping, indexData) { + const indexName = indexData.name || ""; + + return `PUT /${indexName.toLowerCase()}\n${JSON.stringify(mapping, null, 4)}`; + }, + + getMappingScript(jsonSchema) { + let schema = {}; + + if (!(jsonSchema.properties && jsonSchema.properties._source && jsonSchema.properties._source.properties)) { + return schema; + } + + schema = this.getSchemaByItem(jsonSchema.properties._source.properties) + + return schema; + }, + + getSchemaByItem(properties) { + let schema = {}; + + for (let fieldName in properties) { + let field = properties[fieldName]; + + schema[fieldName] = this.getField(field); + } + + return schema; + }, + + getField(field) { + let schema = {}; + const fieldProperties = helper.getFieldProperties(field.type, field, {}); + let type = this.getFieldType(field); + + if (type !== 'object' && type !== 'array') { + schema.type = type; + } + + if (type === 'object') { + schema.properties = {}; + } + + this.setProperties(schema, fieldProperties); + + if (type === 
'geo_shape' || type === 'geo_point') { + return schema; + } else if (field.properties) { + schema.properties = this.getSchemaByItem(field.properties); + } else if (field.items) { + let arrData = field.items; + + if (Array.isArray(field.items)) { + arrData = field.items[0]; + } + + schema = Object.assign(schema, this.getField(arrData)); + } + + return schema; + }, + + getFieldType(field) { + switch(field.type) { + case 'geo-shape': + return 'geo_shape'; + case 'geo-point': + return 'geo_point'; + case 'number': + return field.mode || 'long'; + case 'string': + return field.mode || 'text'; + case 'range': + return field.mode || 'integer_range'; + case 'null': + return 'long'; + default: + return field.type; + } + }, + + setProperties(schema, properties) { + for (let propName in properties) { + if (propName === 'stringfields') { + try { + schema['fields'] = JSON.parse(properties[propName]); + } catch (e) { + } + } else { + schema[propName] = properties[propName]; + } + } + + return schema; + } +}; diff --git a/forward_engineering/config.json b/forward_engineering/config.json new file mode 100644 index 0000000..086d068 --- /dev/null +++ b/forward_engineering/config.json @@ -0,0 +1,6 @@ +{ + "extension": "txt", + "filterName": "Plain text", + "namePrefix": "Elasticsearch Mapping", + "hasUpdateScript": true +} diff --git a/forward_engineering/package.json b/forward_engineering/package.json new file mode 100644 index 0000000..59369db --- /dev/null +++ b/forward_engineering/package.json @@ -0,0 +1,8 @@ +{ + "name": "elasticsearch", + "version": "1.0.0", + "description": "", + "author": "Hackolade", + "dependencies": { + } +} diff --git a/helper/helper.js b/helper/helper.js new file mode 100644 index 0000000..8a792a9 --- /dev/null +++ b/helper/helper.js @@ -0,0 +1,37 @@ +const fs = require('fs'); +const path = require('path'); +const fieldLevelConfig = JSON.parse(fs.readFileSync(path.join(__dirname, '../properties_pane/field_level/fieldLevelConfig.json')).toString().replace(/\/\*[.\s\S]*\*\//ig, "")); + +module.exports = { + getTargetFieldLevelPropertyNames(type, data) { + if (!fieldLevelConfig.structure[type]) { + return []; + } + + return fieldLevelConfig.structure[type].filter(property => { + if (typeof property === 'object' && property.isTargetProperty) { + if (property.dependency) { + return (data[property.dependency.key] == property.dependency.value); + } else { + return true; + } + } + + return false; + }).map(property => property.propertyKeyword); + }, + + getFieldProperties(type, data, pseudonyms) { + const propertyNames = this.getTargetFieldLevelPropertyNames(type, data); + + return propertyNames.reduce((result, propertyName) => { + if (Object.prototype.hasOwnProperty.call(data, propertyName)) { + result[propertyName] = data[propertyName]; + } else if (Object.prototype.hasOwnProperty.call(data, pseudonyms[propertyName])) { + result[pseudonyms[propertyName]] = data[pseudonyms[propertyName]]; + } + + return result; + }, {}); + } +}; diff --git a/localization/en.json b/localization/en.json index bf48581..54a48e1 100644 --- a/localization/en.json +++ b/localization/en.json @@ -8,6 +8,7 @@ "MAIN_MENU___INSERT_FIELD": "Insert Field", "MAIN_MENU___APPEND_FIELD": "Append Field", "MAIN_MENU___REVERSE_DB_COLLECTIONS": "Elasticsearch indices...", + "MAIN_MENU___FORWARD_DB_COLLECTIONS": "Elasticsearch Mapping", "TOOLBAR___ADD_BUCKET": "Add index", "TOOLBAR___ADD_COLLECTION": "Add type", "TOOLBAR___ADD_VIEW": "Add Filtered Alias", @@ -84,6 +85,7 @@ "MODAL_WINDOW___CONTAIN_BUCKET": "indices", 
"MODAL_WINDOW___CONTAIN_COLLECTION": "type", "MODAL_WINDOW___DB_CONNECTION_PROCESS": "Elasticsearch Reverse-Engineering Process", + "MODAL_WINDOW___DB_CONNECTIONS_LIST_TITLE": "Elasticsearch Connections", "PROGRESS_BAR___DATABASE": "Index", "PROGRESS_BAR___COLLECTION": "Type", "PROGRESS_BAR___PROCESS": "Process", @@ -132,10 +134,13 @@ "COLLECTION_SCHEMA_DEFINITION_TYPE": "document", "MONGODB_SCRIPT_WARNING_MESSAGE": "This view is not associated to a type (viewOn property).", "TYPE": {}, - "CENTRAL_PANE___TAB_MODEL_DEFINITIONS": "User-Defined Types", - "CONTEXT_MENU___ADD_MODEL_REFERENCE": "User-Defined Type", - "CONTEXT_MENU___GO_TO_DEFINITION": "Go to User-Defined Type", - "DOCUMENTATION___DB_DEFINITIONS": "User-Defined Types", + "CENTRAL_PANE___TAB_MODEL_DEFINITIONS": "Model Definitions", + "CONTEXT_MENU___ADD_MODEL_REFERENCE": "Model Definition", + "CONTEXT_MENU___GO_TO_DEFINITION": "Go to Model Definition", + "DOCUMENTATION___DB_DEFINITIONS": "Model Definitions", "CONTEXT_MENU___CONVERT_TO_PATTERN_FIELD": "Convert to Pattern Field", - "CONTEXT_MENU___CONVERT_PATTERN_TO_REGULAR_FIELD": "Convert to Regular Field" + "CONTEXT_MENU___CONVERT_PATTERN_TO_REGULAR_FIELD": "Convert to Regular Field", + "CENTRAL_PANE___FE_SCRIPT": "Elasticsearch Mapping", + "MODAL_WINDOW___FE_SCRIPT_OPTION_UPDATE": "CURL Script", + "MODAL_WINDOW___FE_SCRIPT_OPTION_CREATE": "Kibana Script" } \ No newline at end of file diff --git a/package.json b/package.json index 85fc05a..f9c57d8 100644 --- a/package.json +++ b/package.json @@ -1,10 +1,10 @@ { "name": "Elasticsearch", - "version": "0.1.4", - "versionDate": "2018-02-25", + "version": "0.1.5", + "versionDate": "2018-03-29", "author": "hackolade", "engines": { - "hackolade": "1.9.x", + "hackolade": "1.12.7", "hackoladePlugin": "1.0.0" }, "contributes": { @@ -21,12 +21,15 @@ "5.4.x", "5.5.x", "5.6.x", - "6.0.x" + "6.0.x", + "6.1.x", + "6.2.x" ] }, "features": { - "nestedCollections": true, - "disableMultipleTypes": true + "disableMultipleTypes": true, + "enableReverseEngineering": true, + "enableForwardEngineering": true } }, "description": "Hackolade plugin for Elasticsearch" diff --git a/properties_pane/field_level/fieldLevelConfig.json b/properties_pane/field_level/fieldLevelConfig.json index a47060e..9f20ea8 100644 --- a/properties_pane/field_level/fieldLevelConfig.json +++ b/properties_pane/field_level/fieldLevelConfig.json @@ -111,7 +111,8 @@ making sure that you maintain a proper JSON format. "propertyValidate": false, "propertyTooltip": "Popup for fields entry", "propertyType": "details", - "template": "textarea" + "template": "textarea", + "isTargetProperty": true }, { "propertyName": "boost", @@ -119,19 +120,26 @@ making sure that you maintain a proper JSON format. "propertyValidate": false, "propertyType": "numeric", "valueType": "number", - "allowNegative": false + "allowNegative": false, + "isTargetProperty": true }, { "propertyName": "doc_values", "propertyKeyword": "doc_values", "shouldValidate": false, - "propertyType": "checkbox" + "propertyType": "checkbox", + "dependency": { + "key": "mode", + "value": "keyword" + }, + "isTargetProperty": true }, { "propertyName": "eager_global_ordinals", "propertyKeyword": "eager_global_ordinals", "shouldValidate": false, - "propertyType": "checkbox" + "propertyType": "checkbox", + "isTargetProperty": true }, { "propertyName": "ignore_above", @@ -139,19 +147,26 @@ making sure that you maintain a proper JSON format. 
"propertyValidate": false, "propertyType": "numeric", "valueType": "number", - "allowNegative": false + "allowNegative": false, + "dependency": { + "key": "mode", + "value": "keyword" + }, + "isTargetProperty": true }, { "propertyName": "include_in_all", "propertyKeyword": "include_in_all", "shouldValidate": false, - "propertyType": "checkbox" + "propertyType": "checkbox", + "isTargetProperty": true }, { "propertyName": "index", "propertyKeyword": "index", "shouldValidate": false, - "propertyType": "checkbox" + "propertyType": "checkbox", + "isTargetProperty": true }, { "propertyName": "index_options", @@ -164,25 +179,53 @@ making sure that you maintain a proper JSON format. "freqs", "positions", "offsets" - ] + ], + "dependency": { + "key": "mode", + "value": "text" + }, + "isTargetProperty": true + }, + { + "propertyName": "index_options", + "propertyKeyword": "index_options", + "shouldValidate": false, + "propertyTooltip": "Select from list of parameter controls for information is added to the inverted index, for search and highlighting purposes", + "propertyType": "select", + "options": [ + "docs", + "freqs" + ], + "dependency": { + "key": "mode", + "value": "keyword" + }, + "isTargetProperty": true }, { "propertyName": "norms", "propertyKeyword": "norms", "shouldValidate": false, - "propertyType": "checkbox" + "propertyType": "checkbox", + "isTargetProperty": true }, { "propertyName": "null_value", "propertyKeyword": "null_value", "propertyValidate": false, - "propertyType": "text" + "propertyType": "text", + "dependency": { + "key": "mode", + "value": "keyword" + }, + "isTargetProperty": true }, { "propertyName": "store", "propertyKeyword": "store", "shouldValidate": false, - "propertyType": "checkbox" + "propertyType": "checkbox", + "isTargetProperty": true }, { "propertyName": "similarity", @@ -191,16 +234,23 @@ making sure that you maintain a proper JSON format. "propertyTooltip": "Select from list of scoring algorithm or similarity per field", "propertyType": "select", "options": [ + "", "BM25", "classic", "boolean" - ] + ], + "isTargetProperty": true }, { "propertyName": "normalizer", "propertyKeyword": "normalizer", "propertyValidate": false, - "propertyType": "text" + "propertyType": "text", + "dependency": { + "key": "mode", + "value": "keyword" + }, + "isTargetProperty": true }, "minLength", "maxLength", @@ -244,7 +294,8 @@ making sure that you maintain a proper JSON format. "propertyName": "coerce", "propertyKeyword": "coerce", "shouldValidate": false, - "propertyType": "checkbox" + "propertyType": "checkbox", + "isTargetProperty": true }, { "propertyName": "boost", @@ -252,43 +303,50 @@ making sure that you maintain a proper JSON format. 
"propertyValidate": false, "propertyType": "numeric", "valueType": "number", - "allowNegative": false + "allowNegative": false, + "isTargetProperty": true }, { "propertyName": "doc_values", "propertyKeyword": "doc_values", "shouldValidate": false, - "propertyType": "checkbox" + "propertyType": "checkbox", + "isTargetProperty": true }, { "propertyName": "ignore_malformed", "propertyKeyword": "ignore_malformed", "shouldValidate": false, - "propertyType": "checkbox" + "propertyType": "checkbox", + "isTargetProperty": true }, { "propertyName": "include_in_all", "propertyKeyword": "include_in_all", "shouldValidate": false, - "propertyType": "checkbox" + "propertyType": "checkbox", + "isTargetProperty": true }, { "propertyName": "index", "propertyKeyword": "index", "shouldValidate": false, - "propertyType": "checkbox" + "propertyType": "checkbox", + "isTargetProperty": true }, { "propertyName": "null_value", "propertyKeyword": "null_value", "propertyValidate": false, - "propertyType": "text" + "propertyType": "text", + "isTargetProperty": true }, { "propertyName": "store", "propertyKeyword": "store", "shouldValidate": false, - "propertyType": "checkbox" + "propertyType": "checkbox", + "isTargetProperty": true }, { "propertyName": "scaling_factor", @@ -296,7 +354,12 @@ making sure that you maintain a proper JSON format. "propertyValidate": false, "propertyType": "numeric", "valueType": "number", - "allowNegative": false + "allowNegative": false, + "dependency": { + "key": "mode", + "value": "scaled_float" + }, + "isTargetProperty": true }, "unit", "minimum", @@ -323,55 +386,64 @@ making sure that you maintain a proper JSON format. "propertyValidate": false, "propertyType": "numeric", "valueType": "number", - "allowNegative": false + "allowNegative": false, + "isTargetProperty": true }, { "propertyName": "doc_values", "propertyKeyword": "doc_values", "shouldValidate": false, - "propertyType": "checkbox" + "propertyType": "checkbox", + "isTargetProperty": true }, { "propertyName": "format", "propertyKeyword": "format", "shouldValidate": false, - "propertyType": "text" + "propertyType": "text", + "isTargetProperty": true }, { "propertyName": "locale", "propertyKeyword": "locale", "shouldValidate": false, - "propertyType": "text" + "propertyType": "text", + "isTargetProperty": true }, { "propertyName": "ignore_malformed", "propertyKeyword": "ignore_malformed", "shouldValidate": false, - "propertyType": "checkbox" + "propertyType": "checkbox", + "isTargetProperty": true }, { "propertyName": "include_in_all", "propertyKeyword": "include_in_all", "shouldValidate": false, - "propertyType": "checkbox" + "propertyType": "checkbox", + "isTargetProperty": true }, { "propertyName": "index", "propertyKeyword": "index", "shouldValidate": false, - "propertyType": "checkbox" + "propertyType": "checkbox", + "isTargetProperty": true }, { "propertyName": "null_value", "propertyKeyword": "null_value", "propertyValidate": false, - "propertyType": "text" + "propertyType": "text", + "isTargetProperty": true }, { "propertyName": "store", "propertyKeyword": "store", "shouldValidate": false, - "propertyType": "checkbox" + "propertyType": "checkbox", + "isTargetProperty": true }, "sample", "comments" @@ -390,31 +462,36 @@ making sure that you maintain a proper JSON format. 
"propertyValidate": false, "propertyType": "numeric", "valueType": "number", - "allowNegative": false + "allowNegative": false, + "isTargetProperty": true }, { "propertyName": "doc_values", "propertyKeyword": "doc_values", "shouldValidate": false, - "propertyType": "checkbox" + "propertyType": "checkbox", + "isTargetProperty": true }, { "propertyName": "index", "propertyKeyword": "index", "shouldValidate": false, - "propertyType": "checkbox" + "propertyType": "checkbox", + "isTargetProperty": true }, { "propertyName": "null_value", "propertyKeyword": "null_value", "propertyValidate": false, - "propertyType": "text" + "propertyType": "text", + "isTargetProperty": true }, { "propertyName": "store", "propertyKeyword": "store", "shouldValidate": false, - "propertyType": "checkbox" + "propertyType": "checkbox", + "isTargetProperty": true }, "sample", "comments" @@ -430,13 +507,15 @@ making sure that you maintain a proper JSON format. "propertyName": "doc_values", "propertyKeyword": "doc_values", "shouldValidate": false, - "propertyType": "checkbox" + "propertyType": "checkbox", + "isTargetProperty": true }, { "propertyName": "store", "propertyKeyword": "store", "shouldValidate": false, - "propertyType": "checkbox" + "propertyType": "checkbox", + "isTargetProperty": true }, "comments" ], @@ -458,13 +537,15 @@ making sure that you maintain a proper JSON format. "long_range", "double_range", "date_range" - ] + ], + "isTargetProperty": true }, { "propertyName": "coerce", "propertyKeyword": "coerce", "shouldValidate": false, - "propertyType": "checkbox" + "propertyType": "checkbox", + "isTargetProperty": true }, { "propertyName": "boost", @@ -472,19 +553,22 @@ making sure that you maintain a proper JSON format. "propertyValidate": false, "propertyType": "text", "valueType": "number", - "allowNegative": false + "allowNegative": false, + "isTargetProperty": true }, { "propertyName": "include_in_all", "propertyKeyword": "include_in_all", "shouldValidate": false, - "propertyType": "checkbox" + "propertyType": "checkbox", + "isTargetProperty": true }, { "propertyName": "store", "propertyKeyword": "store", "shouldValidate": false, - "propertyType": "checkbox" + "propertyType": "checkbox", + "isTargetProperty": true }, "minProperties", "maxProperties", diff --git a/properties_pane/model_level/modelLevelConfig.json b/properties_pane/model_level/modelLevelConfig.json index b390a90..41b59a0 100644 --- a/properties_pane/model_level/modelLevelConfig.json +++ b/properties_pane/model_level/modelLevelConfig.json @@ -111,7 +111,9 @@ making sure that you maintain a proper JSON format. 
"5.4.x", "5.5.x", "5.6.x", - "6.0.x" + "6.0.x", + "6.1.x", + "6.2.x" ], "disabledOption": false }, diff --git a/reverse_engineering/SchemaCreator.js b/reverse_engineering/SchemaCreator.js new file mode 100644 index 0000000..82dd374 --- /dev/null +++ b/reverse_engineering/SchemaCreator.js @@ -0,0 +1,357 @@ +const snippetsPath = "../snippets/"; + +const snippets = { + "geoJSON": require(snippetsPath + "geopoint-geojson.json"), + "geo-bounding": require(snippetsPath + "geopoint-geo-bounding.json"), + "string": require(snippetsPath + "geopoint-string.json"), + "geohash": require(snippetsPath + "geopoint-geohash.json"), + "object": require(snippetsPath + "geopoint-object.json"), + "envelope": require(snippetsPath + "geoshape-envelope.json"), + "linestring": require(snippetsPath + "geoshape-linestring.json"), + "multipoint": require(snippetsPath + "geoshape-multipoint.json"), + "point": require(snippetsPath + "geoshape-point.json"), + "circle": require(snippetsPath + "geoshape-circle.json"), + "geometrycollection": require(snippetsPath + "geoshape-geometrycollection.json"), + "multilinestring": require(snippetsPath + "geoshape-multilinestring.json"), + "multipolygon": require(snippetsPath + "geoshape-multipolygon.json"), + "polygon": require(snippetsPath + "geoshape-polygon.json") +}; + +const helper = require('../helper/helper'); + +module.exports = { + indices: [], + types: [], + logger: { log(type, ...data) { console[type](data); } }, + + init() { + this.types = []; + this.indices = []; + }, + + setLogger(logger) { + this.logger = logger; + }, + + addIndex(index) { + this.indices.push(index); + }, + + addType(type) { + this.types.push(type); + }, + + getMapping(client) { + return new Promise((resolve, reject) => { + client.indices.getMapping({ + index: this.indices, + type: this.types + }) + .then(resolve) + .catch(reject); + }); + }, + + getSchemaTemplate() { + return { + $schema: "http://json-schema.org/draft-04/schema#", + type: "object", + additionalProperties: false, + properties: {} + }; + }, + + getSchema(elasticMapping, sample) { + let schema = this.getSchemaTemplate(); + sample = sample || {}; + + schema.properties = this.getServiceFields(sample); + schema.properties._source.properties = this.getFields(elasticMapping.properties, sample._source); + + return schema; + }, + + getFields(properties, sample) { + let schema = {}; + + for (let fieldName in properties) { + const currentSample = sample && sample[fieldName]; + + schema[fieldName] = this.getField(properties[fieldName], currentSample); + } + + return schema; + }, + + getField(fieldData, sample) { + let schema = {}; + + if (!fieldData) { + return schema; + } + const hasProperties = !!fieldData.properties; + + schema = Object.assign(schema, this.getType(fieldData.type, sample, hasProperties)); + + let isArrayType = [ + 'nested', + 'array', + 'geo-point' + ].indexOf(schema.type) !== -1; + + if (hasProperties) { + let properties = this.getFields(fieldData.properties, sample); + + if (isArrayType) { + schema.items = [{ + type: 'object', + properties + }]; + } else { + schema.properties = properties; + } + } + + if (Array.isArray(sample) && !isArrayType) { + schema = { + type: 'array', + items: [schema] + }; + } + + if (schema.type === 'geo-shape' || schema.type === 'geo-point') { + schema = this.handleSnippet(schema); + } + + schema = this.setProperties(schema, fieldData); + + return schema; + }, + + getType(type, value, hasProperties) { + switch(type) { + case "long": + case "integer": + case "short": + case "byte": + case "double": 
+ case "float": + case "half_float": + case "scaled_float": + return { + type: "number", + mode: type + }; + case "keyword": + case "text": + return { + type: "string", + mode: type + }; + case "integer_range": + case "float_range": + case "long_range": + case "double_range": + case "date_range": + return { + type: "range", + mode: type + }; + case "null": + case "boolean": + case "binary": + case "nested": + case "date": + return { type }; + case "geo_point": + return { + type: "geo-point", + subType: this.getGeoPointSubtype(value) + }; + case "geo_shape": + return { + type: "geo-shape", + subType: this.getGeoShapeSubtype(value) + }; + default: + if (value !== undefined) { + const scalar = this.getScalar(value); + + if (scalar === 'string') { + return { type: 'string', mode: 'text' }; + } else if (scalar === 'number') { + return { + type: 'number', + mode: this.getNumberMode(value) + }; + } else if (Array.isArray(value)) { + return { + type: 'array' + }; + } else { + return { + type: scalar + }; + } + } else { + if (hasProperties) { + return { type: "object" } + } else { + return {}; + } + } + } + }, + + getScalar(value) { + return typeof value; + }, + + getNumberMode(value) { + const byte = 0x7F; + const short = 0x7FFF; + const int = 0x7FFFFFFF; + const isFloat = (value - parseInt(value)) !== 0; + + if (isFloat) { + return 'float'; + } else { + if (value > -(byte + 1) && value < byte) { + return 'byte'; + } else if (value > -(short + 1) && value < short) { + return 'short'; + } else if (value > -(int + 1) && value < int) { + return 'integer'; + } else { + return 'long'; + } + } + }, + + getServiceFields(sample) { + let schema = { + _index: { type: "string", mode: "text" }, + _type: { type: "string", mode: "text" }, + _id: { type: "string", mode: "text" }, + _source: { type: "object", properties: {} } + }; + + if (!sample) { + return schema; + } + + for (let field in sample) { + const value = sample[field]; + + schema[field] = this.getType('', value, typeof value === 'object'); + } + + return schema; + }, + + getGeoPointSubtype(value) { + if (typeof value === "string") { + if (/\-?\d+\.\d+\,\-?\d+\.\d+/.test(value)) { + return "string"; + } else { + return "geohash"; + } + } else if (Array.isArray(value)) { + return "geoJSON"; + } else if (typeof value === "object") { + if (value.top_left && value.bottom_right) { + return "geo-bounding"; + } + } + + return "object"; + }, + + getGeoShapeSubtype(value) { + if (typeof value === "string") { + const isPoint = /^POINT\s*(.+)/i.test(value.trim()); + const isLinestring = /^LINESTRING\s*(.+)/i.test(value.trim()); + const isPolygon = /^POLYGON\s*(.+)/i.test(value.trim()); + const isMultipoint = /^MULTIPOINT\s*(.+)/i.test(value.trim()); + const isMultilinestring = /^MULTILINESTRING\s*(.+)/i.test(value.trim()); + const isMultipolygon = /^MULTIPOLYGON\s*(.+)/i.test(value.trim()); + const isGeometryCollection = /^GEOMETRYCOLLECTION\s*(.+)/i.test(value.trim()); + const isEnvelope = /^BBOX\s*(.+)/i.test(value.trim()); + + if (isPoint) { return "point"; } + if (isLinestring) { return "linestring"; } + if (isPolygon) { return "polygon"; } + if (isMultipoint) { return "multipoint"; } + if (isMultilinestring) { return "multilinestring"; } + if (isMultipolygon) { return "multipolygon"; } + if (isGeometryCollection) { return "geometrycollection"; } + if (isEnvelope) { return "envelope"; } + + } else if (typeof value === "object" && value.type) { + return value.type; + } else { + return "point"; + } + }, + + handleSnippet(schema) { + const snippet = 
snippets[schema.subType]; + if (snippet) { + if (snippet.parentType === 'array') { + schema.items = this.getSchemaFromSnippet(snippet); + } else { + schema.properties = this.getSchemaFromSnippet(snippet); + } + } + + return schema; + }, + + getSchemaFromSnippet(snippet) { + const isArray = snippet.type === 'array' || snippet.parentType === 'array'; + let schema = isArray ? [] : {}; + + for (let i in snippet.properties) { + const field = snippet.properties[i]; + let currentSchema = { + type: field.type + }; + + if (field.properties) { + const properties = this.getSchemaFromSnippet(field); + + if (currentSchema.type === 'array') { + currentSchema.items = properties; + } else { + currentSchema.properties = properties; + } + } + + if (field.sample) { + currentSchema.sample = field.sample; + } + + if (isArray) { + schema.push(currentSchema); + } else { + schema[field.name] = currentSchema; + } + } + + return schema; + }, + + setProperties(schema, fieldData) { + const properties = helper.getFieldProperties(schema.type, Object.assign({mode: fieldData.type}, fieldData), { "stringfields": "fields" }); + + for (let propName in properties) { + if (propName === 'fields') { + schema["stringfields"] = JSON.stringify(properties[propName], null, 4); + } else { + schema[propName] = properties[propName]; + } + } + + return schema; + } +}; diff --git a/reverse_engineering/api.js b/reverse_engineering/api.js index 4f0220b..6d9bcb4 100644 --- a/reverse_engineering/api.js +++ b/reverse_engineering/api.js @@ -1,423 +1,393 @@ 'use strict'; -const config = require("./config"); -const documentClient = require("documentdb").DocumentClient; -const client = new documentClient(config.endpoint, { "masterKey": config.accountKey }); -const async = require('async'); +const elasticsearch = require('elasticsearch'); +const fs = require('fs'); const _ = require('lodash'); +const async = require('async'); +const SchemaCreator = require('./SchemaCreator'); +const versions = require('../package.json').contributes.target.versions; + +const MAX_DOCUMENTS = 30000; + +let connectionParams = {}; +let saveConnectionInfo = {}; + +let _client = null; module.exports = { - connect: function(connectionInfo, cb){ - cb() - }, + connect: function(connectionInfo, logger, cb){ + logger.clear(); + logger.log('error', connectionInfo, 'Connection information', connectionInfo.hiddenKeys); + + let authString = ""; + + if (_client !== null) { + return cb(null, _client); + } + + if (connectionInfo.username) { + authString = connectionInfo.username; + } + + if (connectionInfo.password) { + authString += ':' + connectionInfo.password; + } + + if (connectionInfo.connectionType === 'Direct connection') { + connectionParams.host = { + protocol: connectionInfo.protocol, + host: connectionInfo.host, + port: connectionInfo.port, + path: connectionInfo.path, + auth: authString + }; + } else if (connectionInfo.connectionType === 'Replica set or Sharded cluster') { + connectionParams.hosts = connectionInfo.hosts.map(socket => { + return { + host: socket.host, + port: socket.port, + protocol: connectionInfo.protocol, + auth: authString + }; + }); + } else { + cb('Invalid connection parameters'); + } + + if (connectionInfo.is_ssl) { + connectionParams.ssl = { + ca: fs.readFileSync(connectionInfo.ca), + rejectUnauthorized: connectionInfo.rejectUnauthorized + }; + } + + _client = new elasticsearch.Client(connectionParams); - disconnect: function(connectionInfo, cb){ - cb() + cb(null, _client); }, - testConnection: function(connectionInfo, cb){ - cb(true); + 
disconnect: function(connectionInfo, logger, cb){ + if (_client) { + _client.close(); + _client = null; + } + connectionParams = {}; + cb(); }, - getDatabases: function(connectionInfo, cb){ - listDatabases((err, dbs) => { - if(err){ - console.log(err); + testConnection: function(connectionInfo, logger, cb){ + this.connect(connectionInfo, logger, (err, connection) => { + if (err) { + cb(err); } else { - dbs = dbs.map(item => item.id); - cb(err, dbs); + connection.ping({ + requestTimeout: 5000 + }, (error, success) => { + this.disconnect(connectionInfo, logger, () => {}); + if (error) { + logger.log('error', error, 'Test connection', connectionInfo.hiddenKeys); + } + cb(!success); + }); } }); }, - getDocumentKinds: function(connectionInfo, cb) { - readDatabaseById(connectionInfo.database, (err, database) => { - if(err){ - console.log(err); - } else { - listCollections(database._self, (err, collections) => { - if(err){ - console.log(err); - dbItemCallback(err) - } else { + getDatabases: function(connectionInfo, logger, cb){ + cb(); + }, - async.map(collections, (collectionItem, collItemCallback) => { - readCollectionById(database.id, collectionItem.id, (err, collection) => { - if(err){ - console.log(err); - } else { - let size = getSampleDocSize(1000, connectionInfo.recordSamplingSettings) || 1000; + getDocumentKinds: function(connectionInfo, logger, cb) { + cb(); + }, - listDocuments(collection._self, size, (err, documents) => { - if(err){ - console.log(err); - } else { - documents = filterDocuments(documents); + getDbCollectionsNames: function(connectionInfo, logger, cb) { + this.connect(connectionInfo, logger, (err, client) => { + if (err) { + logger.log('error', err); + cb(err); + this.disconnect(connectionInfo, logger, () => {}); + return; + } + + const { includeSystemCollection } = connectionInfo; - let inferSchema = generateCustomInferSchema(collectionItem.id, documents, { sampleSize: 20 }); - let documentsPackage = getDocumentKindDataFromInfer({ bucketName: collectionItem.id, inference: inferSchema, isCustomInfer: true }, 90); + client.indices.getMapping() + .then(data => { + let result = []; - collItemCallback(err, documentsPackage); - } - }); - } - }); - }, (err, items) => { - if(err){ - console.log(err); - } - return cb(err, items); - }); - } - }); - } - }); - }, + for (let index in data) { + if (!includeSystemCollection && index[0] === '.') { + continue; + } - getDbCollectionsNames: function(connectionInfo, cb) { - readDatabaseById(connectionInfo.database, (err, database) => { - if(err){ - console.log(err); - } else { - listCollections(database._self, (err, collections) => { - if(err){ - console.log(err); - cb(err) - } else { - let collectionNames = collections.map(item => item.id); - handleBucket(connectionInfo, collectionNames, database, cb); + let dbItem = { + dbName: index, + dbCollections: [] + }; + + if (data[index].mappings) { + dbItem.dbCollections = Object.keys(data[index].mappings); + } + + result.push(dbItem); } + + cb(null, result); + }) + .catch(err => { + logger.log('error', err); + cb(err); }); - } }); }, - getDbCollectionsData: function(data, cb){ + getDbCollectionsData: function(data, logger, cb){ let includeEmptyCollection = data.includeEmptyCollection; let { recordSamplingSettings, fieldInference } = data; - let size = getSampleDocSize(1000, recordSamplingSettings) || 1000; - let bucketList = data.collectionData.dataBaseNames; - - readDatabaseById(data.database, (err, database) => { - if(err){ - console.log(err); - } else { - async.map(bucketList, (bucketName, 
collItemCallback) => { - readCollectionById(database.id, bucketName, (err, collection) => { - if(err){ - console.log(err); + const indices = data.collectionData.dataBaseNames; + const types = data.collectionData.collections; + + const bucketInfo = { + indexName: '_index', + indexType: 'string', + docTypeName: '_type', + docTypeType: 'string', + docIDName: '_id', + docIDType: 'string', + sourceName: '_source', + sourceType: 'object' + }; + + const containerLevelKeys = { + index: '_index', + docType: '_type', + docID: '_id', + source: '_source' + }; + + logger.log('info', getSamplingInfo(recordSamplingSettings, fieldInference), 'Reverse-Engineering sampling params', data.hiddenKeys); + logger.log('info', { Indices: indices }, 'Selected collection list', data.hiddenKeys); + + async.waterfall([ + (getDbInfo) => { + this.connect(data, logger, getDbInfo); + }, + (client, getMapping) => { + client.info().then(info => { + const socket = getInfoSocket(); + const modelInfo = { + modelName: info.name, + host: socket.host, + port: +socket.port, + version: getVersion(info.version.number, versions) + }; + + logger.log('info', { modelInfo }, 'Model info'); + + getMapping(null, client, modelInfo) + }).catch(() => getMapping(null, client)); + }, + + (client, modelInfo, getData) => { + getSchemaMapping(types, client).then((jsonSchemas) => { + getData(null, client, modelInfo, jsonSchemas); + }, (err) => { + logger.log('error', err, 'Error of getting schema'); + getData(null, client, modelInfo, null); + }); + }, + + (client, modelInfo, jsonSchemas, next) => { + async.map(indices, (indexName, nextIndex) => { + if (!types[indexName]) { + if (includeEmptyCollection) { + nextIndex(null, [{ + dbName: indexName, + emptyBucket: true, + containerLevelKeys, + bucketInfo + }]); } else { - getOfferType(collection, (err, info) => { - if(err){ - - } else { - let bucketInfo = { - throughput: info.content.offerThroughput, - rump: info.content.offerIsRUPerMinuteThroughputEnabled ? 'OFF' : 'On' - }; - - let indexes = getIndexes(collection.indexingPolicy); - - listDocuments(collection._self, size, (err, documents) => { - if(err){ - console.log(err); - } else { - documents = filterDocuments(documents); - let documentKindName = data.documentKinds[collection.id].documentKindName || '*'; - let docKindsList = data.collectionData.collections[bucketName]; - let collectionPackages = []; - - if(documentKindName !== '*'){ - docKindsList.forEach(docKindItem => { - let newArrayDocuments = documents.filter((item) => { - return item[documentKindName] === docKindItem; - }); - - let documentsPackage = { - dbName: bucketName, - collectionName: docKindItem, - documents: newArrayDocuments || [], - indexes: [], - bucketIndexes: indexes, - views: [], - validation: false, - docType: documentKindName, - bucketInfo - }; - - if(fieldInference.active === 'field'){ - documentsPackage.documentTemplate = documents[0] || null; - } - - collectionPackages.push(documentsPackage) - }); - } - - collItemCallback(err, collectionPackages); - } + nextIndex(null, [{}]); + } + } else { + async.map(types[indexName], (typeName, nextType) => { + async.waterfall([ + (getSampleDocSize) => { + client.count({ + index: indexName, + type: typeName + }, (err, response) => { + getSampleDocSize(err, response); }); + }, + + (response, searchData) => { + const per = recordSamplingSettings.relative.value; + const size = (recordSamplingSettings.active === 'absolute') + ? 
recordSamplingSettings.absolute.value + : Math.round(response.count / 100 * per); + const count = size > MAX_DOCUMENTS ? MAX_DOCUMENTS : size; + + searchData(null, count); + }, + + (size, getTypeData) => { + client.search({ + index: indexName, + type: typeName, + size + }, (err, data) => { + getTypeData(err, data); + }); + }, + + (data, nextCallback) => { + let documents = data.hits.hits; + const documentTemplate = documents.reduce((tpl, doc) => _.merge(tpl, doc), {}); + + let documentsPackage = { + dbName: indexName, + collectionName: typeName, + documents, + indexes: [], + bucketIndexes: [], + views: [], + validation: false, + emptyBucket: false, + containerLevelKeys, + bucketInfo + }; + + const hasJsonSchema = jsonSchemas && jsonSchemas[indexName] && jsonSchemas[indexName].mappings && jsonSchemas[indexName].mappings[typeName]; + + if (hasJsonSchema) { + documentsPackage.validation = { + jsonSchema: SchemaCreator.getSchema( + jsonSchemas[indexName].mappings[typeName], + documentTemplate + ) + }; + } + + if (fieldInference.active === 'field') { + documentsPackage.documentTemplate = documentTemplate; + } + + nextCallback(null, documentsPackage); } - }) - } - }); - }, (err, items) => { - if(err){ - console.log(err); + ], nextType); + }, (err, typeData) => { + if (err) { + nextIndex(err, typeData); + } else { + const filterData = typeData.filter(docPackage => { + if (!includeEmptyCollection) { + if ( + docPackage.documents.length === 0 + && + docPackage.validation + && + docPackage.validation.jsonSchema + && + docPackage.validation.jsonSchema.properties + && + docPackage.validation.jsonSchema.properties._source + && + _.isEmpty(docPackage.validation.jsonSchema.properties._source.properties) + ) { + return false; + } + } + + return true; + }); + nextIndex(null, filterData); + } + }); } - return cb(err, items); + }, (err, items) => { + next(err, items, modelInfo); }); } + ], (err, items, modelInfo) => { + if (err) { + logger.log('error', err); + this.disconnect(connectionInfo, logger, () => {}); + } + + cb(err, items, modelInfo); }); } }; - -function readCollectionById(dbLink, collectionId, callback) { - var collLink = `dbs/${dbLink}/colls/${collectionId}`; - - client.readCollection(collLink, function (err, coll) { - if (err) { - console.log(err); - callback(err); - } else { - callback(null, coll); - } - }); -} - -function getOfferType(collection, callback) { - var querySpec = { - query: 'SELECT * FROM root r WHERE r.resource = @link', - parameters: [ - { - name: '@link', - value: collection._self - } - ] - }; - - client.queryOffers(querySpec).toArray(function (err, offers) { - if (err) { - callback(err); - - } else if (offers.length === 0) { - callback('No offer found for collection'); - - } else { - var offer = offers[0]; - callback(null, offer); - } - }); -} - -function listDatabases(callback) { - var queryIterator = client.readDatabases().toArray(function (err, dbs) { - if (err) { - callback(err); - } - - callback(null, dbs); - }); -} - -function listCollections(databaseLink, callback) { - var queryIterator = client.readCollections(databaseLink).toArray(function (err, cols) { - if (err) { - callback(err); - } else { - callback(null, cols); - } - }); +function getSamplingInfo(recordSamplingSettings, fieldInference){ + let samplingInfo = {}; + let value = recordSamplingSettings[recordSamplingSettings.active].value; + let unit = (recordSamplingSettings.active === 'relative') ? 
'%' : ' records max'; + + samplingInfo.recordSampling = `${recordSamplingSettings.active} ${value}${unit}` + samplingInfo.fieldInference = (fieldInference.active === 'field') ? 'keep field order' : 'alphabetical order'; + + return samplingInfo; } -function readDatabaseById(databaseId, callback) { - client.readDatabase('dbs/' + databaseId, function (err, db) { - if (err) { - callback(err); - } else { - callback(null, db); - } - }); -} +function getVersion(version, versions) { + const arVersion = version.split('.'); + let result = ""; -function listDocuments(collLink, maxItemCount, callback) { - var queryIterator = client.readDocuments(collLink, { maxItemCount }).toArray(function (err, docs) { - if (err) { - callback(err); - } else { - callback(null, docs); - } - }); -} + versions.forEach(v => { + const arV = v.split('.'); + let isVersion = false; -function filterDocuments(documents){ - return documents.map(item =>{ - for(let prop in item){ - if(prop && prop[0] === '_'){ - delete item[prop]; + for (let i = 0; i < arV.length; i++) { + if (arV[0] === 'x') { + continue; } - } - return item; - }); -} -function generateCustomInferSchema(bucketName, documents, params){ - function typeOf(obj) { - return {}.toString.call(obj).split(' ')[1].slice(0, -1).toLowerCase(); - }; - - let sampleSize = params.sampleSize || 30; - - let inferSchema = { - "#docs": 0, - "$schema": "http://json-schema.org/schema#", - "properties": {} - }; - - documents.forEach(item => { - inferSchema["#docs"]++; - - for(let prop in item){ - if(inferSchema.properties.hasOwnProperty(prop)){ - inferSchema.properties[prop]["#docs"]++; - inferSchema.properties[prop]["samples"].indexOf(item[prop]) === -1 && inferSchema.properties[prop]["samples"].length < sampleSize? inferSchema.properties[prop]["samples"].push(item[prop]) : ''; - inferSchema.properties[prop]["type"] = typeOf(item[prop]); + if (arVersion[i] == arV[i]) { + result = v; } else { - inferSchema.properties[prop] = { - "#docs": 1, - "%docs": 100, - "samples": [item[prop]], - "type": typeOf(item[prop]) - } + break; } } }); - for (let prop in inferSchema.properties){ - inferSchema.properties[prop]["%docs"] = Math.round((inferSchema.properties[prop]["#docs"] / inferSchema["#docs"] * 100), 2); + if (result) { + return result; + } else { + return versions[versions.length - 1]; } - return inferSchema; } -function getDocumentKindDataFromInfer(data, probability){ - let suggestedDocKinds = []; - let otherDocKinds = []; - let documentKind = { - key: '', - probability: 0 - }; - - if(data.isCustomInfer){ - let minCount = Infinity; - let inference = data.inference.properties; - - for(let key in inference){ - if(config.excludeDocKind.indexOf(key) === -1){ - if(inference[key]["%docs"] >= probability && inference[key].samples.length && typeof inference[key].samples[0] !== 'object'){ - suggestedDocKinds.push(key); - - if(inference[key]["%docs"] >= documentKind.probability && inference[key].samples.length < minCount){ - minCount = inference[key].samples.length; - documentKind.probability = inference[key]["%docs"]; - documentKind.key = key; - } - } else { - otherDocKinds.push(key); - } - } - } +function getInfoSocket() { + if (connectionParams.host) { + return { + host: connectionParams.host.host, + port: connectionParams.host.port + }; + } else if (connectionParams.hosts) { + return { + host: connectionParams.hosts[0].host, + port: connectionParams.hosts[0].port + }; } else { - let flavor = (data.flavorValue) ? 
data.flavorValue.split(',') : data.inference[0].Flavor.split(','); - if(flavor.length === 1){ - suggestedDocKinds = Object.keys(data.inference[0].properties); - let matсhedDocKind = flavor[0].match(/([\s\S]*?) \= "?([\s\S]*?)"?$/); - documentKind.key = (matсhedDocKind.length) ? matсhedDocKind[1] : ''; + return { + host: "", + port: "" } } - - let documentKindData = { - bucketName: data.bucketName, - documentList: suggestedDocKinds, - documentKind: documentKind.key, - preSelectedDocumentKind: data.preSelectedDocumentKind, - otherDocKinds - }; - - return documentKindData; } -function handleBucket(connectionInfo, collectionNames, database, dbItemCallback){ - let size = getSampleDocSize(1000, connectionInfo.recordSamplingSettings) || 1000; - - async.map(collectionNames, (collectionName, collItemCallback) => { - readCollectionById(database.id, collectionName, (err, collection) => { - if(err){ - console.log(err); - } else { - listDocuments(collection._self, size, (err, documents) => { - if(err){ - console.log(err); - } else { - documents = filterDocuments(documents); - let documentKind = connectionInfo.documentKinds[collection.id].documentKindName || '*'; - let documentTypes = []; - - if(documentKind !== '*'){ - documentTypes = documents.map(function(doc){ - return doc[documentKind]; - }); - documentTypes = documentTypes.filter((item) => Boolean(item)); - documentTypes = _.uniq(documentTypes); - } - - let dataItem = prepareConnectionDataItem(documentTypes, collection.id, database); - collItemCallback(err, dataItem); - } - }); - } - }); - }, (err, items) => { - if(err){ - console.log(err); +function getSchemaMapping(indices, client) { + SchemaCreator.init(); + for (let indexName in indices) { + SchemaCreator.addIndex(indexName); + for (let i in indices[indexName]) { + SchemaCreator.addType(indices[indexName][i]); } - return dbItemCallback(err, items); - }); -} - -function prepareConnectionDataItem(documentTypes, bucketName, database){ - let uniqueDocuments = _.uniq(documentTypes); - let connectionDataItem = { - dbName: bucketName, - dbCollections: uniqueDocuments - }; - - return connectionDataItem; -} - -function getSampleDocSize(count, recordSamplingSettings) { - let per = recordSamplingSettings.relative.value; - return (recordSamplingSettings.active === 'absolute') - ? 
recordSamplingSettings.absolute.value - : Math.round( count/100 * per); -} - -function getIndexes(indexingPolicy){ - let generalIndexes = []; - - if(indexingPolicy){ - indexingPolicy.includedPaths.forEach(item => { - let indexes = item.indexes; - indexes = indexes.map(index => { - index.indexPrecision = index.precision; - index.automatic = item.automatic; - index.mode = indexingPolicy.indexingMode; - index.indexIncludedPath = item.path; - return index; - }); - - generalIndexes = generalIndexes.concat(generalIndexes, indexes); - }); } - return generalIndexes; + return SchemaCreator.getMapping(client); } \ No newline at end of file diff --git a/reverse_engineering/config.json b/reverse_engineering/config.json index ba61cc0..9dbd8ed 100644 --- a/reverse_engineering/config.json +++ b/reverse_engineering/config.json @@ -3,5 +3,6 @@ "NO_DATABASES": "There is no databases in Elasticsearch instance", "WRONG_CONNECTION": "Can not connect to Elasticsearch instance" }, + "scenario": "connectToDB", "excludeDocKind": ["id"] } \ No newline at end of file diff --git a/reverse_engineering/connection_settings_modal/connectionSettingsModalConfig.json b/reverse_engineering/connection_settings_modal/connectionSettingsModalConfig.json index faceedb..4311ad4 100644 --- a/reverse_engineering/connection_settings_modal/connectionSettingsModalConfig.json +++ b/reverse_engineering/connection_settings_modal/connectionSettingsModalConfig.json @@ -9,15 +9,125 @@ "inputPlaceholder": "Name" }, + { + "inputLabel": "Connection type", + "inputKeyword": "connectionType", + "inputType": "select", + "description": "", + "options": [{ + "value": "Direct connection", + "label": "Direct connection" + }, { + "value": "Replica set or Sharded cluster", + "label": "Replica set or Sharded cluster" + }], + "defaultValue": "Direct connection" + }, { "inputLabel": "Host", "inputKeyword": "host", - "inputType": "text" + "inputType": "text", + "dependency": { + "key": "connectionType", + "value": "Direct connection" + } }, { "inputLabel": "Port", "inputKeyword": "port", - "inputType": "numeric" + "inputType": "numeric", + "dependency": { + "key": "connectionType", + "value": "Direct connection" + } + }, + { + "inputLabel": "Hosts", + "inputKeyword": "hosts", + "inputType": "dynamicListInput", + "description": "", + "dependency": { + "key": "connectionType", + "value": "Replica set or Sharded cluster" + }, + "modalTitle": "Add new host", + "modalInputs": [ + { + "inputLabel": "Address", + "inputKeyword": "host", + "inputType": "text", + "inputPlaceholder": "Address", + "description": "" + }, + { + "inputLabel": "Port", + "inputKeyword": "port", + "inputType": "text", + "inputPlaceholder": "Port", + "description": "" + } + ] + }, + { + "inputLabel": "Protocol", + "inputKeyword": "protocol", + "inputType": "select", + "options": [{ + "value": "http", + "label": "http" + }, { + "value": "https", + "label": "https" + }], + "defaultValue": "http" + }, + { + "inputLabel": "Path", + "inputKeyword": "path", + "inputType": "text" + } + ] + }, + { + "lowerTab": "Authentication", + "structure": [ + { + "inputLabel": "User Name", + "inputKeyword": "username", + "inputType": "text", + "inputPlaceholder": "User Name" + + }, + { + "inputLabel": "Password", + "inputKeyword": "password", + "inputType": "password", + "inputTooltip": "Password", + "isHiddenKey": true, + "defaultValue": "" + } + ] + }, + { + "lowerTab": "SSL", + "structure": [ + { + "inputLabel": "Enable SSL", + "inputKeyword": "is_ssl", + "inputType": "checkbox" + + }, + { + "inputLabel": 
"Authority Certificate", + "inputKeyword": "ca", + "inputType": "file", + "extensions": ["pem", "crt", "key"], + "isHiddenKey": true + }, + { + "inputLabel": "Reject Unauthorized", + "inputKeyword": "rejectUnauthorized", + "inputType": "checkbox" } ] } diff --git a/reverse_engineering/package.json b/reverse_engineering/package.json index ad92464..324d58f 100644 --- a/reverse_engineering/package.json +++ b/reverse_engineering/package.json @@ -4,6 +4,8 @@ "description": "", "author": "Hackolade", "dependencies": { - "elasticsearch": "^13.3.1" + "async": "^2.5.0", + "elasticsearch": "^13.3.1", + "lodash": "^4.17.5" } } diff --git a/types/array.json b/types/array.json index 0e803bc..7e57ec2 100644 --- a/types/array.json +++ b/types/array.json @@ -3,8 +3,9 @@ "erdAbbreviation": "", "dtdAbbreviation": "[...]", "parentType": "array", - "sample": ["sample"], + "sample": [], "useSample": true, + "default": true, "defaultValues": { "properties": [], "primaryKey": false, diff --git a/types/date.json b/types/date.json index 16302de..961b167 100644 --- a/types/date.json +++ b/types/date.json @@ -3,6 +3,7 @@ "erdAbbreviation": "", "dtdAbbreviation": "{dt}", "parentType": "string", + "sample": "2018/03/15", "useSample": true, "defaultValues": { "relationshipType": "", diff --git a/types/nested.json b/types/nested.json index 7880fb8..bd43155 100644 --- a/types/nested.json +++ b/types/nested.json @@ -3,7 +3,7 @@ "erdAbbreviation": "", "dtdAbbreviation": "[...]", "parentType": "array", - "sample": ["sample"], + "sample": [], "useSample": true, "defaultValues": { "childValueType": "object", diff --git a/types/object.json b/types/object.json index eb1e71d..90a4c38 100644 --- a/types/object.json +++ b/types/object.json @@ -3,6 +3,7 @@ "erdAbbreviation": "", "dtdAbbreviation": "{...}", "parentType": "document", + "default": true, "defaultValues": { "primaryKey": false, "relationshipType": "", diff --git a/types/string.json b/types/string.json index c23f0ea..1e3ed89 100644 --- a/types/string.json +++ b/types/string.json @@ -3,6 +3,7 @@ "erdAbbreviation": "", "dtdAbbreviation": "{ABC}", "useSample": true, + "default": true, "defaultValues": { "minLength": "", "maxLength": "", @@ -18,6 +19,6 @@ "sample": "", "mode": "text", "index_options": "docs", - "similarity": "BM25" + "similarity": "" } } \ No newline at end of file