From 45952dafc28ab0d637369b40307777984c9c2039 Mon Sep 17 00:00:00 2001
From: Patrik Stas
Date: Thu, 8 Sep 2022 00:54:18 +0200
Subject: [PATCH] Drive UI updates by ledgercpy worker events

Signed-off-by: Patrik Stas
---
 .../app-config/sovrin-buildernet.json         | 20 +++++++++
 indyscan-api/nodemon-buildernet.json          | 15 +++++++
 indyscan-api/package.json                     |  1 +
 indyscan-api/src/service/service-storages.js  |  2 +-
 indyscan-daemon/app-configs/sovbuilder.json   |  9 ----
 indyscan-daemon/package.json                  |  1 +
 indyscan-daemon/src/constants.js              |  8 ++++
 indyscan-daemon/src/index.js                  | 12 ++---
 indyscan-daemon/src/logging/logger-builder.js |  8 ++--
 indyscan-daemon/src/server/server.js          | 28 +++++++++---
 indyscan-daemon/src/server/wsockets.js        | 10 ++---
 .../src/worker-templates/rtw-db-expansion.js  |  3 +-
 .../rtw-ledger-to-serialized.js               |  4 +-
 indyscan-daemon/src/workers/worker-rtw.js     | 20 ++++++---
 .../test/unit/workers/timing.spec.js          | 45 +++++++++++++++++++
 indyscan-webapp/nodemon-indyscan.json         | 13 ++++++
 indyscan-webapp/nodemon.json                  |  3 +-
 indyscan-webapp/pages/home.js                 | 15 ++++---
 indyscan-webapp/txtools/index.js              | 18 ++++----
 19 files changed, 178 insertions(+), 57 deletions(-)
 create mode 100644 indyscan-api/app-config/sovrin-buildernet.json
 create mode 100644 indyscan-api/nodemon-buildernet.json
 create mode 100644 indyscan-daemon/src/constants.js
 create mode 100644 indyscan-daemon/test/unit/workers/timing.spec.js
 create mode 100644 indyscan-webapp/nodemon-indyscan.json

diff --git a/indyscan-api/app-config/sovrin-buildernet.json b/indyscan-api/app-config/sovrin-buildernet.json
new file mode 100644
index 00000000..e682f02a
--- /dev/null
+++ b/indyscan-api/app-config/sovrin-buildernet.json
@@ -0,0 +1,20 @@
+[
+  {
+    "id": "SOVRIN_BUILDERNET",
+    "ui": {
+      "priority": 1,
+      "display": "BuilderNet",
+      "display-long": "Sovrin BuilderNet",
+      "description": "For active development of your solution.",
+      "tutorial": "Get your DID and start writing on the network",
+      "tutorial-link": "https://selfserve.sovrin.org/",
+      "logo-address": "/static/sovrin.png"
+    },
+    "aliases": [
+      "sovbuilder"
+    ],
+    "es" : {
+      "index": "txs-sovbuilder"
+    }
+  }
+]
diff --git a/indyscan-api/nodemon-buildernet.json b/indyscan-api/nodemon-buildernet.json
new file mode 100644
index 00000000..138e8730
--- /dev/null
+++ b/indyscan-api/nodemon-buildernet.json
@@ -0,0 +1,15 @@
+{
+  "verbose": true,
+  "ignore": ["node_modules", ".next"],
+  "watch": ["src/**/*"],
+  "ext": "js json",
+  "env": {
+    "ES_URL": "http://localhost:9200",
+    "DAEMON_URL": "http://localhost:3709",
+    "LOG_LEVEL": "info",
+    "PORT" : 3708,
+    "LOG_HTTP_REQUESTS" : true,
+    "LOG_HTTP_RESPONSES" : true,
+    "NETWORKS_CONFIG_PATH": "./app-config/sovrin-buildernet.json"
+  }
+}
diff --git a/indyscan-api/package.json b/indyscan-api/package.json
index e2588af2..4cc6f112 100644
--- a/indyscan-api/package.json
+++ b/indyscan-api/package.json
@@ -7,6 +7,7 @@
     "lint": "standard",
     "lint:fix": "standard --fix",
     "dev": "nodemon src/index.js",
+    "dev:sovrin:builder": "nodemon --config nodemon-buildernet.json src/index.js",
     "test:unit": "jest tests/unit",
     "start": "cross-env NODE_ENV=production node src/index.js"
   },
diff --git a/indyscan-api/src/service/service-storages.js b/indyscan-api/src/service/service-storages.js
index b36296f5..711eaf8e 100644
--- a/indyscan-api/src/service/service-storages.js
+++ b/indyscan-api/src/service/service-storages.js
@@ -8,7 +8,7 @@ const { createStorageReadEs } = require('indyscan-storage/src')
 
 async function createLedgerStorageManager (esUrl) {
   const storages = {}
-  logger.info(`Connecting to ElasticSearh '${esUrl}'.`)
+  logger.info(`Connecting to ElasticSearch '${esUrl}'.`)
   const esClient = new elasticsearch.Client({ node: esUrl })
 
   async function addIndyNetwork (networkId, networkEsIndex) {
diff --git a/indyscan-daemon/app-configs/sovbuilder.json b/indyscan-daemon/app-configs/sovbuilder.json
index aefda9c2..eb18a2b9 100644
--- a/indyscan-daemon/app-configs/sovbuilder.json
+++ b/indyscan-daemon/app-configs/sovbuilder.json
@@ -12,15 +12,6 @@
       "genesisPath": "{{{cfgdir}}}/genesis/{{{INDY_NETWORK}}}.txn",
       "esIndex": "{{{ES_INDEX}}}",
       "esUrl": "{{{ES_URL}}}",
-      "workerTiming": "SLOW"
-    }
-  },
-  {
-    "builder": "rtwExpansion",
-    "params": {
-      "indyNetworkId": "{{{INDY_NETWORK}}}",
-      "esUrl": "{{{ES_URL}}}",
-      "esIndex": "{{{ES_INDEX}}}",
       "workerTiming": "MEDIUM"
     }
   }
diff --git a/indyscan-daemon/package.json b/indyscan-daemon/package.json
index f1c021ab..3f258014 100644
--- a/indyscan-daemon/package.json
+++ b/indyscan-daemon/package.json
@@ -14,6 +14,7 @@
     "lint:fix": "standard --fix",
     "dev": "cross-env NODE_ENV=development nodemon src/index.js",
     "dev:sovrin:staging:builder": "cross-env NODE_ENV=development WORKER_CONFIGS=app-configs/sovstaging.json,app-configs/sovbuilder.json nodemon src/index.js",
+    "dev:sovrin:builder": "cross-env NODE_ENV=development WORKER_CONFIGS=app-configs/sovbuilder.json nodemon src/index.js",
    "dev:sovrin:staging": "cross-env NODE_ENV=development WORKER_CONFIGS=app-configs/sovstaging.json nodemon src/index.js",
    "dev:sovrin:sovmain": "cross-env NODE_ENV=development WORKER_CONFIGS=app-configs/sovmain.json nodemon src/index.js",
    "dev:sovrin": "cross-env NODE_ENV=development WORKER_CONFIGS=app-configs/sovmain.json,app-configs/sovstaging.json,app-configs/sovbuilder.json nodemon src/index.js",
diff --git a/indyscan-daemon/src/constants.js b/indyscan-daemon/src/constants.js
new file mode 100644
index 00000000..4dcca267
--- /dev/null
+++ b/indyscan-daemon/src/constants.js
@@ -0,0 +1,8 @@
+const OPERATION_TYPES = {
+  LEDGER_CPY: 'ledgercpy',
+  EXPANSION: 'expansion'
+}
+
+module.exports = {
+  OPERATION_TYPES
+}
diff --git a/indyscan-daemon/src/index.js b/indyscan-daemon/src/index.js
index 755f69d0..d6d59949 100644
--- a/indyscan-daemon/src/index.js
+++ b/indyscan-daemon/src/index.js
@@ -27,8 +27,8 @@ const { createNetOpRtwSerialization } = require('./worker-templates/rtw-ledger-t
 // }
 // })
 
-async function buildWorkers (builder, builderParams) {
-  logger.info(`Going to build workers by ${builder} from ${JSON.stringify(builderParams)}`)
+async function buildWorker (builder, builderParams) {
+  logger.info(`Going to build worker by ${builder} from ${JSON.stringify(builderParams, null, 2)}`)
   if (builder === 'rtwSerialization') {
     return createNetOpRtwSerialization(builderParams)
   } else if (builder === 'rtwExpansion') {
@@ -50,14 +50,14 @@ async function run () {
   await sleep(2000)
   logger.info(`Will bootstrap app from following operations definitions ${JSON.stringify(workerConfigPaths, null, 2)}`)
 
-  for (const workerConfigPath of workerConfigPaths) {
+  for (const workerConfigPath of workerConfigPaths) { // per each worker config file, render the file
     const workersConfig = fs.readFileSync(workerConfigPath)
     const { workersBuildersTemplate, env } = JSON.parse(workersConfig)
     env.cfgdir = path.dirname(workerConfigPath)
-    const workerBuilders = JSON.parse(Mustache.render(JSON.stringify(workersBuildersTemplate), env))
-    for (const workerBuilder of workerBuilders) {
+    const workerBuilders = JSON.parse(Mustache.render(JSON.stringify(workersBuildersTemplate), env)) // render template
+    for (const workerBuilder of workerBuilders) { // one file can define multiple workers
       const { builder, params } = workerBuilder
-      const { workers, sources, targets, transformers, iterators } = await buildWorkers(builder, params)
+      const { workers, sources, targets, transformers, iterators } = await buildWorker(builder, params)
       allWorkers.push(workers)
       allSources.push(sources)
       allTargets.push(targets)
diff --git a/indyscan-daemon/src/logging/logger-builder.js b/indyscan-daemon/src/logging/logger-builder.js
index e9c4aa1d..f1be450e 100644
--- a/indyscan-daemon/src/logging/logger-builder.js
+++ b/indyscan-daemon/src/logging/logger-builder.js
@@ -2,10 +2,10 @@ const winston = require('winston')
 const mkdirp = require('mkdirp')
 const Elasticsearch = require('winston-elasticsearch')
 const { format } = require('winston')
-const { timestamp, printf } = format
+const { timestamp, printf, label } = format
 
-const myFormat = printf(({ level, message, timestamp, metadaemon }) => {
-  return `${timestamp} [${metadaemon && metadaemon.workerId ? metadaemon.workerId : '--'}] ${level}: ${message}`
+const myFormat = printf(({ label, level, message, timestamp, metadaemon }) => {
+  return `${timestamp} [${label}] ${level}: ${message}`
 })
 
 function createLogger (loggerName, consoleLogsLevel, enableLogFiles) {
@@ -13,8 +13,8 @@ function createLogger (loggerName, consoleLogsLevel, enableLogFiles) {
     transports: [
       new winston.transports.Console({
         level: consoleLogsLevel,
-        label: loggerName,
         format: winston.format.combine(
+          label({ label: loggerName }),
           timestamp(),
           myFormat,
           winston.format.colorize({ all: true })
diff --git a/indyscan-daemon/src/server/server.js b/indyscan-daemon/src/server/server.js
index 57c03dd5..db25320c 100644
--- a/indyscan-daemon/src/server/server.js
+++ b/indyscan-daemon/src/server/server.js
@@ -6,6 +6,7 @@ const logger = require('../logging/logger-main')
 var pretty = require('express-prettify')
 const { createSocketioManager } = require('./wsockets')
 const { logRequests, logResponses } = require('./middleware')
+const { OPERATION_TYPES } = require('../constants')
 
 function setupLoggingMiddlleware (app, enableRequestLogging, enableResponseLogging) {
   if (enableRequestLogging) {
@@ -15,18 +16,30 @@ function setupLoggingMiddlleware (app, enableRequestLogging, enableResponseLoggi
     app.use(logResponses)
   }
 }
+function linkLedgerCpyWorkersToSockets (socketioManager, serviceWorkers) {
+  logger.info(`Linking workers of operationType ${OPERATION_TYPES.LEDGER_CPY} with sockets.`)
+  const workerQuery = { operationTypes: [OPERATION_TYPES.LEDGER_CPY] }
+  const workers = serviceWorkers.getWorkers(workerQuery)
+  for (const worker of workers) {
+    const emitter = worker.getEventEmitter()
+    const { workerId, subledger, operationType, indyNetworkId } = worker.getWorkerInfo()
+    if (operationType === OPERATION_TYPES.LEDGER_CPY) {
+      socketioManager.forwardEmitterEventToWebsocket(emitter, workerId, 'tx-processed', 'tx-ledger-processed', indyNetworkId, subledger)
+      socketioManager.forwardEmitterEventToWebsocket(emitter, workerId, 'tx-rescan-scheduled', 'tx-ledger-rescan-scheduled', indyNetworkId, subledger)
+    }
+  }
+}
 
 function linkExpansionWorkersToSockets (socketioManager, serviceWorkers) {
-  logger.info('Linking workers with sockets.')
-  const workerQuery = { operationTypes: ['expansion'] }
+  logger.info(`Linking workers of operationType ${OPERATION_TYPES.EXPANSION} with sockets.`)
+  const workerQuery = { operationTypes: [OPERATION_TYPES.EXPANSION] }
   const workers = serviceWorkers.getWorkers(workerQuery)
   for (const worker of workers) {
     const emitter = worker.getEventEmitter()
-    const { subledger, operationType, indyNetworkId } = worker.getWorkerInfo()
-    logger.info(`Setting up event->ws forward for ${operationType}/${indyNetworkId}/${subledger} `)
-    if (operationType === 'expansion') {
-      socketioManager.forwardEmitterEventToWebsocket(emitter, 'tx-processed', indyNetworkId, subledger)
-      socketioManager.forwardEmitterEventToWebsocket(emitter, 'rescan-scheduled', indyNetworkId, subledger)
+    const { workerId, subledger, operationType, indyNetworkId } = worker.getWorkerInfo()
+    if (operationType === OPERATION_TYPES.EXPANSION) {
+      socketioManager.forwardEmitterEventToWebsocket(emitter, workerId, 'tx-processed', 'tx-processed', indyNetworkId, subledger)
+      socketioManager.forwardEmitterEventToWebsocket(emitter, workerId, 'tx-rescan-scheduled', 'tx-rescan-scheduled', indyNetworkId, subledger)
     }
   }
 }
@@ -47,6 +60,7 @@ function setupWebsockets (expressServer, serviceWorkers) {
   const socketioManager = createSocketioManager(expressServer)
   socketioManager.setupBasicSocketioListeners(createRoomJoinReactor(serviceWorkers))
   linkExpansionWorkersToSockets(socketioManager, serviceWorkers)
+  linkLedgerCpyWorkersToSockets(socketioManager, serviceWorkers)
 }
 
 function startServer (serviceWorkers) {
diff --git a/indyscan-daemon/src/server/wsockets.js b/indyscan-daemon/src/server/wsockets.js
index c5f80739..71b2b9ca 100644
--- a/indyscan-daemon/src/server/wsockets.js
+++ b/indyscan-daemon/src/server/wsockets.js
@@ -6,17 +6,17 @@ function createSocketioManager (expressServer) {
   logger.info('Creating socketio manager')
   const io = socketio(expressServer)
 
-  function forwardEmitterEventToWebsocket (emitter, eventName, forwardToRoom, subledger) {
-    logger.info(`Linking worker emitter to sockets for indyNetworkId=${forwardToRoom} subledger=${subledger}, `)
+  function forwardEmitterEventToWebsocket (emitter, workerId, sourceEmitterEventName, targetSocketEventName, forwardToRoom) {
+    logger.info(`Worker ${workerId} events of name ${sourceEmitterEventName} will be broadcast to room ${forwardToRoom} as event ${targetSocketEventName}`)
 
-    emitter.on(eventName, (payload) => {
+    emitter.on(sourceEmitterEventName, (payload) => {
       io.of('/').in(`${forwardToRoom}`).clients((error, clients) => {
         if (error) {
           logger.error('Problem listing clients to print info.')
         }
-        logger.info(`Broadcasting into room ${forwardToRoom}: "${eventName}" to ids=${JSON.stringify(clients)}`)
+        logger.info(`Worker ${workerId} emitting socket event ${targetSocketEventName} to room ${forwardToRoom} (${clients.length} clients)`)
       })
-      io.to(forwardToRoom).emit(eventName, payload)
+      io.to(forwardToRoom).emit(targetSocketEventName, payload)
     })
   }
 
diff --git a/indyscan-daemon/src/worker-templates/rtw-db-expansion.js b/indyscan-daemon/src/worker-templates/rtw-db-expansion.js
index a44cf217..bc5e8b74 100644
--- a/indyscan-daemon/src/worker-templates/rtw-db-expansion.js
+++ b/indyscan-daemon/src/worker-templates/rtw-db-expansion.js
@@ -5,9 +5,10 @@ const { createTargetElasticsearch } = require('../targets/target-elasticsearch')
 const { createWorkerRtw } = require('../workers/worker-rtw')
 const { createIteratorGuided } = require('../iterators/iterator-guided')
 const { createSourceElasticsearch } = require('../sources/source-elasticsearch')
+const { OPERATION_TYPES } = require('../constants')
 
 async function createNetOpRtwExpansion ({ indyNetworkId, esUrl, esIndex, workerTiming }) {
-  const operationType = 'expansion'
+  const operationType = OPERATION_TYPES.EXPANSION
 
   const sourceEs = await createSourceElasticsearch({
     indyNetworkId,
diff --git a/indyscan-daemon/src/worker-templates/rtw-ledger-to-serialized.js b/indyscan-daemon/src/worker-templates/rtw-ledger-to-serialized.js
index ecc8611d..dcf4a361 100644
--- a/indyscan-daemon/src/worker-templates/rtw-ledger-to-serialized.js
+++ b/indyscan-daemon/src/worker-templates/rtw-ledger-to-serialized.js
@@ -4,9 +4,11 @@ const { createIteratorGuided } = require('../iterators/iterator-guided')
 const { createTargetElasticsearch } = require('../targets/target-elasticsearch')
 const { createSourceElasticsearch } = require('../sources/source-elasticsearch')
 const { createSourceLedger } = require('../sources/source-ledger')
+const { OPERATION_TYPES } = require('../constants')
+
 
 async function createNetOpRtwSerialization ({ indyNetworkId, genesisPath, esUrl, esIndex, workerTiming }) {
-  const operationType = 'ledgercpy'
+  const operationType = OPERATION_TYPES.LEDGER_CPY
   const sourceLedger = await createSourceLedger({
     name: indyNetworkId,
     genesisPath
diff --git a/indyscan-daemon/src/workers/worker-rtw.js b/indyscan-daemon/src/workers/worker-rtw.js
index 5d042d27..98759ce3 100644
--- a/indyscan-daemon/src/workers/worker-rtw.js
+++ b/indyscan-daemon/src/workers/worker-rtw.js
@@ -10,12 +10,15 @@ const { envConfig } = require('../config/env')
 
 function getExpandedTimingConfig (providedTimingSetup) {
   let presetData
-  if (!providedTimingSetup || (typeof providedTimingSetup !== 'string')) {
+  if (!providedTimingSetup) {
     presetData = getDefaultPreset()
-  } else {
+  } else if (typeof providedTimingSetup === 'string') {
     presetData = resolvePreset(providedTimingSetup) || getDefaultPreset()
+  } else if (typeof providedTimingSetup === 'object') {
+    const defaultPreset = getDefaultPreset()
+    presetData = { ...defaultPreset, ...providedTimingSetup }
   }
-  return Object.assign(presetData, providedTimingSetup)
+  return presetData
 }
 
 function validateTimingConfig (timingConfig) {
@@ -38,10 +41,10 @@ function validateTimingConfig (timingConfig) {
 }
 
 async function createWorkerRtw ({ indyNetworkId, subledger, operationType, iterator, iteratorTxFormat, transformer, target, timing }) {
-  console.log(`BUILDING WORKER for ${indyNetworkId}... timit= ${timing}`)
-  const eventEmitter = new EventEmitter()
   const workerId = `${indyNetworkId}.${subledger}.${operationType}`
   const logger = createLogger(workerId, envConfig.LOG_LEVEL, envConfig.ENABLE_LOGFILES)
+  logger.info(`Building RTW worker ${workerId} for network: ${indyNetworkId}`)
+  const eventEmitter = new EventEmitter()
   const loggerMetadata = {
     metadaemon: {
       workerId,
@@ -81,7 +84,7 @@ async function createWorkerRtw ({ indyNetworkId, subledger, operationType, itera
     throw Error(errMsg)
   }
   timing = getExpandedTimingConfig(timing)
-  logger.info(`Worker ${workerId} using timing ${JSON.stringify(timing)}`)
+  logger.info(`Effective timing configuration ${JSON.stringify(timing, null, 2)}`)
   validateTimingConfig(timing)
 
   const { timeoutOnSuccess, timeoutOnTxIngestionError, timeoutOnLedgerResolutionError, timeoutOnTxNoFound, jitterRatio } = timing
@@ -379,4 +382,7 @@ async function createWorkerRtw ({ indyNetworkId, subledger, operationType, itera
   }
 }
 
-module.exports.createWorkerRtw = createWorkerRtw
+module.exports = {
+  createWorkerRtw,
+  getExpandedTimingConfig
+}
diff --git a/indyscan-daemon/test/unit/workers/timing.spec.js b/indyscan-daemon/test/unit/workers/timing.spec.js
new file mode 100644
index 00000000..cbb568e8
--- /dev/null
+++ b/indyscan-daemon/test/unit/workers/timing.spec.js
@@ -0,0 +1,45 @@
+const { getExpandedTimingConfig } = require('../../../src/workers/worker-rtw')
+const sleep = require('sleep-promise')
+
+describe('worker timing configuration', () => {
+  it('should expand timing configuration', async () => {
+    const timing = getExpandedTimingConfig("FAST")
+    const expected = {
+      "timeoutOnSuccess": 1000,
+      "timeoutOnTxIngestionError": 30000,
+      "timeoutOnLedgerResolutionError": 30000,
+      "timeoutOnTxNoFound": 3000,
+      "jitterRatio": 0.1
+    }
+    expect(timing).toStrictEqual(expected)
+  })
+
+  it('should keep complete timing configuration unchanged', async () => {
+    const timing = {
+      "timeoutOnSuccess": 1234,
+      "timeoutOnTxIngestionError": 2345,
+      "timeoutOnLedgerResolutionError": 3456,
+      "timeoutOnTxNoFound": 9999,
+      "jitterRatio": 0.1
+    }
+    const expandedTimingConfig = getExpandedTimingConfig(timing)
+    expect(expandedTimingConfig).toStrictEqual(timing)
+  })
+
+  it('should expand missing timing configuration with defaults', async () => {
+    const timing = {
+      "timeoutOnTxIngestionError": 60000,
+      "timeoutOnLedgerResolutionError": 60000,
+      "timeoutOnTxNoFound": 9000,
+    }
+    const expandedTimingConfig = getExpandedTimingConfig(timing)
+    const expected = {
+      "timeoutOnSuccess": 4000,
+      "timeoutOnTxIngestionError": 60000,
+      "timeoutOnLedgerResolutionError": 60000,
+      "timeoutOnTxNoFound": 9000,
+      "jitterRatio": 0.1
+    }
+    expect(expandedTimingConfig).toStrictEqual(expected)
+  })
+})
diff --git a/indyscan-webapp/nodemon-indyscan.json b/indyscan-webapp/nodemon-indyscan.json
new file mode 100644
index 00000000..c5e3311e
--- /dev/null
+++ b/indyscan-webapp/nodemon-indyscan.json
@@ -0,0 +1,13 @@
+{
+  "verbose": true,
+  "ignore": ["node_modules", ".next"],
+  "watch": ["server/**/*", "index.js"],
+  "ext": "js json",
+  "env": {
+    "INDYSCAN_API_URL": "https://indyscan.io",
+    "PORT" : 3707,
+    "LOG_LEVEL": "debug",
+    "LOG_HTTP_REQUESTS" : true,
+    "LOG_HTTP_RESPONSES" : true
+  }
+}
diff --git a/indyscan-webapp/nodemon.json b/indyscan-webapp/nodemon.json
index c5e3311e..2b2fe497 100644
--- a/indyscan-webapp/nodemon.json
+++ b/indyscan-webapp/nodemon.json
@@ -4,7 +4,8 @@
   "watch": ["server/**/*", "index.js"],
   "ext": "js json",
   "env": {
-    "INDYSCAN_API_URL": "https://indyscan.io",
+    "INDYSCAN_API_URL": "http://localhost:3708",
+    "DAEMON_WS_URL": "http://localhost:3709",
     "PORT" : 3707,
     "LOG_LEVEL": "debug",
     "LOG_HTTP_REQUESTS" : true,
diff --git a/indyscan-webapp/pages/home.js b/indyscan-webapp/pages/home.js
index e150f774..aaaa4f24 100644
--- a/indyscan-webapp/pages/home.js
+++ b/indyscan-webapp/pages/home.js
@@ -17,13 +17,14 @@ class HomePage extends Component {
   static async getInitialProps ({ req, query }) {
     const baseUrl = getBaseUrl(req)
     const { network } = query
-    const features = await fetch(`${baseUrl}/features`)
+    const featuresRes = await fetch(`${baseUrl}/features`)
+    const features = (await featuresRes.json())
     const versionRes = await fetch(`${baseUrl}/version`)
     const version = (await versionRes.json()).version
     const networkDetails = await getNetwork(baseUrl, network)
-    const domainTxs = await getTxs(baseUrl, network, 'domain', 0, 13, [], 'full')
-    const poolTxs = await getTxs(baseUrl, network, 'pool', 0, 13, [], 'full')
-    const configTxs = await getTxs(baseUrl, network, 'config', 0, 13, [], 'full')
+    const domainTxs = await getTxs(baseUrl, network, 'domain', 0, 13, [], 'serialized')
+    const poolTxs = await getTxs(baseUrl, network, 'pool', 0, 13, [], 'serialized')
+    const configTxs = await getTxs(baseUrl, network, 'config', 0, 13, [], 'serialized')
     return {
       features,
       networkDetails,
@@ -86,10 +87,11 @@ class HomePage extends Component {
     this.setState({ poolTxs })
   }
 
-  onTxProcessed (payload) {
+  onTxDiscovered (payload) {
     this.setState({ animateFirst: true })
     // const {workerData, txData} = payload
     const { txData } = payload
+    console.log(`onTxDiscovered >>> ${JSON.stringify(txData)}`)
     if (txData.imeta.subledger === 'domain') {
       this.addNewDomainTx(txData)
     }
@@ -157,7 +159,8 @@ class HomePage extends Component {
       console.log(`switched-room-notification: Entered room ${activeWsRoom}`)
       this.setState({activeWsRoom})
       socket.on('rescan-scheduled', this.onRescanScheduled.bind(this))
-      socket.on('tx-processed', this.onTxProcessed.bind(this))
+      // socket.on('tx-processed', this.onTxProcessed.bind(this))
+      socket.on('tx-ledger-processed', this.onTxDiscovered.bind(this))
       console.log(`Registered hooks on the socket! ${socket.hasListeners()}`)
     })
 
diff --git a/indyscan-webapp/txtools/index.js b/indyscan-webapp/txtools/index.js
index 39292220..aec12480 100644
--- a/indyscan-webapp/txtools/index.js
+++ b/indyscan-webapp/txtools/index.js
@@ -111,18 +111,18 @@ const txDataDescriptiveExtractors = {
   'UNKNOWN': empty
 }
 
-export function extractTxDataBasic (txFull) {
-  const { seqNo } = txFull.imeta
+export function extractTxDataBasic (tx) {
+  const { seqNo } = tx.imeta
   let txnId, txnTimeIso8601, typeName, from, indexedFields
-  if (txFull?.idata?.expansion) {
-    typeName = txFull.idata.expansion.idata.txn.typeName
-    txnId = txFull.idata.expansion.idata.txnMetadata.txnId
-    const epoch = txFull.idata.expansion?.idata?.txnMetadata?.txnTime
+  if (tx?.idata?.expansion) {
+    typeName = tx.idata.expansion.idata.txn.typeName
+    txnId = tx.idata.expansion.idata.txnMetadata.txnId
+    const epoch = tx.idata.expansion?.idata?.txnMetadata?.txnTime
     txnTimeIso8601 = epoch ? new Date(epoch).toISOString() : null
-    from = txFull?.idata?.expansion?.idata?.txn?.metadata?.from || '-'
+    from = tx?.idata?.expansion?.idata?.txn?.metadata?.from || '-'
     indexedFields = true
-  } else if (txFull?.idata?.serialized) {
-    const deserializedOriginal = JSON.parse(txFull.idata.serialized.idata.json)
+  } else if (tx?.idata?.json || tx?.idata?.serialized) {
+    const deserializedOriginal = JSON.parse(tx?.idata?.json || tx?.idata?.serialized?.idata?.json)
     txnId = deserializedOriginal.txnMetadata.txnId
     const epoch = deserializedOriginal.txnMetadata.txnTime * 1000
     txnTimeIso8601 = epoch ? new Date(epoch).toISOString() : null
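
For reference, the core mechanism this patch relies on, reduced to a standalone sketch: whatever a ledgercpy worker emits on its EventEmitter is re-broadcast into the socket.io room named after its indyNetworkId. Event and room names mirror server.js and wsockets.js above; the bare http server and the stand-in emitter are illustration only, not the daemon's actual bootstrap.

// Sketch of the emitter -> socket.io room forwarding (names follow the patch,
// the plain http server and the fake worker emitter are stand-ins).
const EventEmitter = require('events')
const http = require('http')
const socketio = require('socket.io')

const httpServer = http.createServer()
const io = socketio(httpServer)

function forwardEmitterEventToWebsocket (emitter, workerId, sourceEmitterEventName, targetSocketEventName, forwardToRoom) {
  emitter.on(sourceEmitterEventName, (payload) => {
    // every payload the worker emits is pushed to all sockets joined to the room
    io.to(forwardToRoom).emit(targetSocketEventName, payload)
  })
}

// stand-in for the emitter returned by a ledgercpy worker's getEventEmitter()
const workerEmitter = new EventEmitter()
forwardEmitterEventToWebsocket(workerEmitter, 'SOVRIN_BUILDERNET.domain.ledgercpy', 'tx-processed', 'tx-ledger-processed', 'SOVRIN_BUILDERNET')

httpServer.listen(3709, () => {
  workerEmitter.emit('tx-processed', { txData: { imeta: { subledger: 'domain', seqNo: 1 } } })
})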
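
On the consuming side, any socket.io client (the webapp's home page in this patch) can subscribe to the new ledgercpy events once it sits in a network room. A minimal sketch with socket.io-client follows, assuming the daemon runs at DAEMON_WS_URL from nodemon.json; the 'tx-ledger-processed' and 'tx-ledger-rescan-scheduled' names match the forwarding in server.js and the payload shape follows home.js, but the outgoing 'join-room' event is a placeholder, since the room-join handshake is not part of this patch.

const io = require('socket.io-client')

const socket = io('http://localhost:3709') // DAEMON_WS_URL

socket.on('connect', () => {
  socket.emit('join-room', 'SOVRIN_BUILDERNET') // placeholder event name, handshake not shown in this patch
})

socket.on('switched-room-notification', (activeWsRoom) => {
  console.log(`Entered room ${activeWsRoom}`)
})

// fired whenever the ledgercpy worker copies a new transaction from the ledger
socket.on('tx-ledger-processed', (payload) => {
  const { txData } = payload
  console.log(`New ${txData.imeta.subledger} tx, seqNo=${txData.imeta.seqNo}`)
})

// fired when the ledgercpy worker schedules a ledger rescan
socket.on('tx-ledger-rescan-scheduled', (payload) => {
  console.log('Ledger rescan scheduled', JSON.stringify(payload))
})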
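
The reworked getExpandedTimingConfig in worker-rtw.js accepts nothing, a preset name, or a partial object that is spread over the default preset. A self-contained sketch of that behaviour follows; the FAST values and the two DEFAULT values marked as such are taken from timing.spec.js, the remaining DEFAULT numbers are placeholders, and the real presets live in the daemon's preset module.

const PRESETS = {
  DEFAULT: {
    timeoutOnSuccess: 4000, // per timing.spec.js
    timeoutOnTxIngestionError: 30000, // placeholder
    timeoutOnLedgerResolutionError: 30000, // placeholder
    timeoutOnTxNoFound: 3000, // placeholder
    jitterRatio: 0.1 // per timing.spec.js
  },
  FAST: { // per timing.spec.js
    timeoutOnSuccess: 1000,
    timeoutOnTxIngestionError: 30000,
    timeoutOnLedgerResolutionError: 30000,
    timeoutOnTxNoFound: 3000,
    jitterRatio: 0.1
  }
}

function getDefaultPreset () { return PRESETS.DEFAULT }
function resolvePreset (name) { return PRESETS[name] }

function getExpandedTimingConfig (providedTimingSetup) {
  if (!providedTimingSetup) {
    return getDefaultPreset()
  }
  if (typeof providedTimingSetup === 'string') {
    // unknown preset names silently fall back to the default preset
    return resolvePreset(providedTimingSetup) || getDefaultPreset()
  }
  // partial object: missing keys are filled in from the default preset
  return { ...getDefaultPreset(), ...providedTimingSetup }
}

console.log(getExpandedTimingConfig('FAST').timeoutOnSuccess) // 1000
console.log(getExpandedTimingConfig({ timeoutOnTxNoFound: 9000 }).jitterRatio) // 0.1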
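
The logger-builder.js change works because in winston 3 the label is itself a format: format.label({ label }) stamps info.label, which printf can then read, whereas a bare label property in the transport options is not visible to printf. A minimal sketch of the resulting setup, with a hypothetical worker id used as the logger name:

const { createLogger, transports, format } = require('winston')
const { combine, timestamp, label, printf } = format

const myFormat = printf(({ label, level, message, timestamp }) => {
  return `${timestamp} [${label}] ${level}: ${message}`
})

const logger = createLogger({
  transports: [
    new transports.Console({
      level: 'info',
      format: combine(
        label({ label: 'SOVRIN_BUILDERNET.domain.ledgercpy' }), // hypothetical workerId as logger name
        timestamp(),
        myFormat,
        format.colorize({ all: true })
      )
    })
  ]
})

logger.info('Building RTW worker') // <ISO timestamp> [SOVRIN_BUILDERNET.domain.ledgercpy] info: Building RTW worker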