From 727f5e24200c0007e172c946ebbe7095081bd85e Mon Sep 17 00:00:00 2001 From: Zvonimir Date: Fri, 27 Dec 2024 15:30:57 +0100 Subject: [PATCH 01/27] Fix get on migrated assets --- .../v1-0-0-handle-get-request-command.js | 13 +++-- .../get/sender/get-validate-asset-command.js | 25 ++++----- .../protocols/get/sender/local-get-command.js | 51 +++++++++++++------ .../v1.0.0/v1-0-0-get-request-command.js | 17 ++++--- .../http-api/v0/get-http-api-controller-v0.js | 12 ++--- .../http-api/v1/get-http-api-controller-v1.js | 12 ++--- src/controllers/rpc/get-rpc-controller.js | 1 + 7 files changed, 80 insertions(+), 51 deletions(-) diff --git a/src/commands/protocols/get/receiver/v1.0.0/v1-0-0-handle-get-request-command.js b/src/commands/protocols/get/receiver/v1.0.0/v1-0-0-handle-get-request-command.js index 6d4efc513..1b76bdad7 100644 --- a/src/commands/protocols/get/receiver/v1.0.0/v1-0-0-handle-get-request-command.js +++ b/src/commands/protocols/get/receiver/v1.0.0/v1-0-0-handle-get-request-command.js @@ -37,6 +37,7 @@ class HandleGetRequestCommand extends HandleProtocolMessageCommand { ual, includeMetadata, isOperationV0, + isOldContract, } = commandData; let { assertionId, knowledgeAssetId } = commandData; @@ -109,6 +110,7 @@ class HandleGetRequestCommand extends HandleProtocolMessageCommand { ); let assertionPromise; + let notMigrated = false; if (!assertionId) { assertionId = await this.tripleStoreService.getLatestAssertionId( @@ -148,6 +150,10 @@ class HandleGetRequestCommand extends HandleProtocolMessageCommand { return fallbackResult; } + if (!isOperationV0) { + notMigrated = true; + } + this.operationIdService.emitChangeEvent( OPERATION_ID_STATUS.GET.GET_REMOTE_GET_ASSERTION_END, operationId, @@ -215,9 +221,10 @@ class HandleGetRequestCommand extends HandleProtocolMessageCommand { const [assertion, metadata] = await Promise.all(promises); const responseData = { - assertion: isOperationV0 - ? [...(assertion.public ?? []), ...(assertion.private ?? [])] - : assertion, + assertion: + (isOperationV0 || notMigrated) && isOldContract + ? [...(assertion.public ?? []), ...(assertion.private ?? 
[])] + : assertion, ...(includeMetadata && metadata && { metadata }), }; diff --git a/src/commands/protocols/get/sender/get-validate-asset-command.js b/src/commands/protocols/get/sender/get-validate-asset-command.js index 1b5423490..9831b5c2c 100644 --- a/src/commands/protocols/get/sender/get-validate-asset-command.js +++ b/src/commands/protocols/get/sender/get-validate-asset-command.js @@ -1,10 +1,6 @@ import ValidateAssetCommand from '../../../common/validate-asset-command.js'; import Command from '../../../command.js'; -import { - OPERATION_ID_STATUS, - ERROR_TYPE, - OLD_CONTENT_STORAGE_MAP, -} from '../../../../constants/constants.js'; +import { OPERATION_ID_STATUS, ERROR_TYPE } from '../../../../constants/constants.js'; class GetValidateAssetCommand extends ValidateAssetCommand { constructor(ctx) { @@ -27,8 +23,15 @@ class GetValidateAssetCommand extends ValidateAssetCommand { * @param command */ async execute(command) { - const { operationId, blockchain, contract, knowledgeCollectionId, ual, isOperationV0 } = - command.data; + const { + operationId, + blockchain, + contract, + knowledgeCollectionId, + ual, + isOperationV0, + isOldContract, + } = command.data; await this.operationIdService.updateOperationIdStatus( operationId, blockchain, @@ -53,13 +56,7 @@ class GetValidateAssetCommand extends ValidateAssetCommand { blockchain, ); // TODO: Update to validate knowledge asset index - // TODO: Use isOldContract as variable and pass it through with command.data since it's used - if ( - !isOperationV0 && - Object.values(OLD_CONTENT_STORAGE_MAP).every( - (ca) => !ca.toLowerCase().includes(contract.toLowerCase()), - ) - ) { + if (!isOperationV0 && !isOldContract) { const isValidUal = await this.validationService.validateUal( blockchain, contract, diff --git a/src/commands/protocols/get/sender/local-get-command.js b/src/commands/protocols/get/sender/local-get-command.js index 11001d308..79d2bc472 100644 --- a/src/commands/protocols/get/sender/local-get-command.js +++ b/src/commands/protocols/get/sender/local-get-command.js @@ -35,6 +35,7 @@ class LocalGetCommand extends Command { contentType, assertionId, isOperationV0, + isOldContract, } = command.data; let { knowledgeAssetId } = command.data; await this.operationIdService.updateOperationIdStatus( @@ -103,23 +104,13 @@ class LocalGetCommand extends Command { ); let assertionPromise; + let notMigrated = false; if (assertionId) { assertionPromise = (async () => { let result = null; - for (const repository of [ - TRIPLE_STORE_REPOSITORIES.PRIVATE_CURRENT, - TRIPLE_STORE_REPOSITORIES.PUBLIC_CURRENT, - ]) { - // eslint-disable-next-line no-await-in-loop - result = await this.tripleStoreService.getV6Assertion(repository, assertionId); - if (result?.length) { - break; - } - } - - if (!result?.length) { + if (!isOperationV0) { result = await this.tripleStoreService.getAssertion( blockchain, contract, @@ -128,6 +119,35 @@ class LocalGetCommand extends Command { contentType, ); } + + if (!result?.length) { + for (const repository of [ + TRIPLE_STORE_REPOSITORIES.PRIVATE_CURRENT, + TRIPLE_STORE_REPOSITORIES.PUBLIC_CURRENT, + ]) { + // eslint-disable-next-line no-await-in-loop + result = await this.tripleStoreService.getV6Assertion( + repository, + assertionId, + ); + if (result?.length) { + if (!isOperationV0) { + notMigrated = true; + } + break; + } + } + if (!result?.length && isOperationV0) { + result = await this.tripleStoreService.getAssertion( + blockchain, + contract, + knowledgeCollectionId, + knowledgeAssetId, + contentType, + ); + } + } + 
this.operationIdService.emitChangeEvent( OPERATION_ID_STATUS.GET.GET_LOCAL_GET_ASSERTION_END, operationId, @@ -198,9 +218,10 @@ class LocalGetCommand extends Command { const [assertion, metadata] = await Promise.all(promises); const responseData = { - assertion: isOperationV0 - ? [...(assertion?.public ?? []), ...(assertion?.private ?? [])] - : assertion, + assertion: + (isOperationV0 || notMigrated) && isOldContract + ? [...(assertion?.public ?? []), ...(assertion?.private ?? [])] + : assertion, ...(includeMetadata && metadata && { metadata }), }; diff --git a/src/commands/protocols/get/sender/v1.0.0/v1-0-0-get-request-command.js b/src/commands/protocols/get/sender/v1.0.0/v1-0-0-get-request-command.js index cf2e0cd52..d292d9e9b 100644 --- a/src/commands/protocols/get/sender/v1.0.0/v1-0-0-get-request-command.js +++ b/src/commands/protocols/get/sender/v1.0.0/v1-0-0-get-request-command.js @@ -7,7 +7,6 @@ import { OPERATION_STATUS, OPERATION_ID_STATUS, PRIVATE_HASH_SUBJECT_PREFIX, - OLD_CONTENT_STORAGE_MAP, } from '../../../../../constants/constants.js'; class GetRequestCommand extends ProtocolRequestCommand { @@ -52,6 +51,7 @@ class GetRequestCommand extends ProtocolRequestCommand { paranetId, isOperationV0, assertionId, + isOldContract, } = command.data; return { @@ -65,16 +65,19 @@ class GetRequestCommand extends ProtocolRequestCommand { paranetId, isOperationV0, assertionId, + isOldContract, }; } async handleAck(command, responseData) { - const { blockchain, contract, knowledgeCollectionId, knowledgeAssetId, isOperationV0 } = - command.data; - - const isOldContract = Object.values(OLD_CONTENT_STORAGE_MAP).some((ca) => - ca.toLowerCase().includes(contract.toLowerCase()), - ); + const { + blockchain, + contract, + knowledgeCollectionId, + knowledgeAssetId, + isOperationV0, + isOldContract, + } = command.data; if (responseData?.assertion?.public) { // Only whole collection can be validated not particular KA diff --git a/src/controllers/http-api/v0/get-http-api-controller-v0.js b/src/controllers/http-api/v0/get-http-api-controller-v0.js index 39c1f370c..60569f7b5 100644 --- a/src/controllers/http-api/v0/get-http-api-controller-v0.js +++ b/src/controllers/http-api/v0/get-http-api-controller-v0.js @@ -65,14 +65,13 @@ class GetController extends BaseController { // Get assertionId - datasetRoot // + const isOldContract = Object.values(OLD_CONTENT_STORAGE_MAP).some((ca) => + ca.toLowerCase().includes(contract.toLowerCase()), + ); + const commandSequence = []; - if ( - !tripleStoreMigrationAlreadyExecuted && - Object.values(OLD_CONTENT_STORAGE_MAP) - .map((ca) => ca.toLowerCase()) - .includes(contract.toLowerCase()) - ) { + if (!tripleStoreMigrationAlreadyExecuted && isOldContract) { commandSequence.push('getAssertionMerkleRootCommand'); } @@ -91,6 +90,7 @@ class GetController extends BaseController { knowledgeAssetId, operationId, paranetUAL, + isOldContract, contentType: contentType ?? 
TRIPLES_VISIBILITY.ALL, isOperationV0: true, }, diff --git a/src/controllers/http-api/v1/get-http-api-controller-v1.js b/src/controllers/http-api/v1/get-http-api-controller-v1.js index 1993bca28..56cb167ac 100644 --- a/src/controllers/http-api/v1/get-http-api-controller-v1.js +++ b/src/controllers/http-api/v1/get-http-api-controller-v1.js @@ -66,15 +66,14 @@ class GetController extends BaseController { // Get assertionId - datasetRoot // + const isOldContract = Object.values(OLD_CONTENT_STORAGE_MAP).some((ca) => + ca.toLowerCase().includes(contract.toLowerCase()), + ); + const commandSequence = []; commandSequence.push('getValidateAssetCommand'); - if ( - !tripleStoreMigrationAlreadyExecuted && - Object.values(OLD_CONTENT_STORAGE_MAP) - .map((ca) => ca.toLowerCase()) - .includes(contract.toLowerCase()) - ) { + if (!tripleStoreMigrationAlreadyExecuted && isOldContract) { commandSequence.push('getAssertionMerkleRootCommand'); } @@ -93,6 +92,7 @@ class GetController extends BaseController { knowledgeAssetId, operationId, paranetUAL, + isOldContract, contentType: contentType ?? TRIPLES_VISIBILITY.ALL, }, transactional: false, diff --git a/src/controllers/rpc/get-rpc-controller.js b/src/controllers/rpc/get-rpc-controller.js index 32b242b84..d587e9144 100644 --- a/src/controllers/rpc/get-rpc-controller.js +++ b/src/controllers/rpc/get-rpc-controller.js @@ -39,6 +39,7 @@ class GetController extends BaseController { paranetId: message.data.paranetId, isOperationV0: message.data.isOperationV0, assertionId: message.data.assertionId, + isOldContract: message.data.isOldContract, }, transactional: false, }); From 7c2db0e21d41a2cfd42b2059318a233a19cd4547 Mon Sep 17 00:00:00 2001 From: Zvonimir Date: Fri, 27 Dec 2024 16:14:44 +0100 Subject: [PATCH 02/27] Fix fetching v6 assertion first --- .../v1-0-0-handle-get-request-command.js | 61 +++++++++++-------- 1 file changed, 34 insertions(+), 27 deletions(-) diff --git a/src/commands/protocols/get/receiver/v1.0.0/v1-0-0-handle-get-request-command.js b/src/commands/protocols/get/receiver/v1.0.0/v1-0-0-handle-get-request-command.js index 1b76bdad7..731356946 100644 --- a/src/commands/protocols/get/receiver/v1.0.0/v1-0-0-handle-get-request-command.js +++ b/src/commands/protocols/get/receiver/v1.0.0/v1-0-0-handle-get-request-command.js @@ -125,43 +125,50 @@ class HandleGetRequestCommand extends HandleProtocolMessageCommand { if (assertionId) { // DO NOT RUN THIS IF !assertionId - assertionPromise = this.tripleStoreService - .getV6Assertion(TRIPLE_STORE_REPOSITORIES.PUBLIC_CURRENT, assertionId) - .then(async (result) => { - if (!result?.length) { - this.logger.info( - `No V6 assertion found for assertionId: ${assertionId}, falling back to V8 getAssertion`, - ); + assertionPromise = (async () => { + let result = null; + + if (!isOperationV0) { + result = await this.tripleStoreService.getAssertion( + blockchain, + contract, + knowledgeCollectionId, + knowledgeAssetId, + TRIPLES_VISIBILITY.PUBLIC, + ); + } - const fallbackResult = await this.tripleStoreService.getAssertion( + if (!result?.length) { + // eslint-disable-next-line no-await-in-loop + result = await this.tripleStoreService.getV6Assertion( + TRIPLE_STORE_REPOSITORIES.PUBLIC_CURRENT, + assertionId, + ); + if (result?.length && !isOperationV0) { + notMigrated = true; + } + + if (!result?.length && isOperationV0) { + result = await this.tripleStoreService.getAssertion( blockchain, contract, knowledgeCollectionId, knowledgeAssetId, TRIPLES_VISIBILITY.PUBLIC, ); - - this.operationIdService.emitChangeEvent( - 
OPERATION_ID_STATUS.GET.GET_REMOTE_GET_ASSERTION_END, - operationId, - blockchain, - ); - - return fallbackResult; - } - - if (!isOperationV0) { - notMigrated = true; } + } - this.operationIdService.emitChangeEvent( - OPERATION_ID_STATUS.GET.GET_REMOTE_GET_ASSERTION_END, - operationId, - blockchain, - ); + this.operationIdService.emitChangeEvent( + OPERATION_ID_STATUS.GET.GET_REMOTE_GET_ASSERTION_END, + operationId, + blockchain, + ); - return result.split('\n').filter((res) => res.length > 0); - }); + return typeof result === 'string' + ? result.split('\n').filter((res) => res.length > 0) + : result; + })(); } else { if (!knowledgeAssetId) { try { From 095fa7cc06b5b23816ce6286c57a6aff8ca7b48e Mon Sep 17 00:00:00 2001 From: Mihajlo Pavlovic Date: Mon, 30 Dec 2024 10:47:55 +0100 Subject: [PATCH 03/27] Fix create paranet synced asset record table migration --- ...40924161700-create-paranet-synced-asset.js | 76 +++++++++++++------ 1 file changed, 52 insertions(+), 24 deletions(-) diff --git a/src/modules/repository/implementation/sequelize/migrations/20240924161700-create-paranet-synced-asset.js b/src/modules/repository/implementation/sequelize/migrations/20240924161700-create-paranet-synced-asset.js index 3c7138611..f84a5bbf6 100644 --- a/src/modules/repository/implementation/sequelize/migrations/20240924161700-create-paranet-synced-asset.js +++ b/src/modules/repository/implementation/sequelize/migrations/20240924161700-create-paranet-synced-asset.js @@ -45,34 +45,63 @@ export const up = async ({ context: { queryInterface, Sequelize } }) => { }, }); - await queryInterface.sequelize.query(` - CREATE TRIGGER before_insert_paranet_synced_asset - BEFORE INSERT ON paranet_synced_asset - FOR EACH ROW - SET NEW.created_at = NOW(); + const [triggerInsertExists] = await queryInterface.sequelize.query(` + SELECT COUNT(*) AS trigger_exists + FROM information_schema.triggers + WHERE trigger_schema = DATABASE() + AND trigger_name = 'before_insert_paranet_synced_asset'; `); + if (triggerInsertExists[0].trigger_exists === 0) { + await queryInterface.sequelize.query(` + CREATE TRIGGER before_insert_paranet_synced_asset + BEFORE INSERT ON paranet_synced_asset + FOR EACH ROW + BEGIN + SET NEW.created_at = NOW(); + END; + `); + } - await queryInterface.sequelize.query(` - CREATE TRIGGER before_update_paranet_synced_asset - BEFORE UPDATE ON paranet_synced_asset - FOR EACH ROW - SET NEW.updated_at = NOW(); + const [triggerUpdateExists] = await queryInterface.sequelize.query(` + SELECT COUNT(*) AS trigger_exists + FROM information_schema.triggers + WHERE trigger_schema = DATABASE() + AND trigger_name = 'before_update_paranet_synced_asset'; `); + if (triggerUpdateExists[0].trigger_exists === 0) { + await queryInterface.sequelize.query(` + CREATE TRIGGER before_update_paranet_synced_asset + BEFORE UPDATE ON paranet_synced_asset + FOR EACH ROW + BEGIN + SET NEW.updated_at = NOW(); + END; + `); + } - await queryInterface.sequelize.query(` - CREATE INDEX idx_paranet_ual_created_at - ON paranet_synced_asset (paranet_ual, created_at); - `); + const indexes = [ + { name: 'idx_paranet_ual_created_at', columns: '(paranet_ual, created_at)' }, + { name: 'idx_sender', columns: '(sender)' }, + { name: 'idx_paranet_ual_unique', columns: '(paranet_ual)' }, + ]; - await queryInterface.sequelize.query(` - CREATE INDEX idx_sender - ON paranet_synced_asset (sender); - `); - - await queryInterface.sequelize.query(` - CREATE INDEX idx_paranet_ual_unique - ON paranet_synced_asset (paranet_ual); - `); + for (const index of indexes) { + 
// eslint-disable-next-line no-await-in-loop + const [indexExists] = await queryInterface.sequelize.query(` + SELECT COUNT(*) AS index_exists + FROM information_schema.statistics + WHERE table_schema = DATABASE() + AND table_name = 'paranet_synced_asset' + AND index_name = '${index.name}'; + `); + if (indexExists[0].index_exists === 0) { + // eslint-disable-next-line no-await-in-loop + await queryInterface.sequelize.query(` + CREATE INDEX ${index.name} + ON paranet_synced_asset ${index.columns}; + `); + } + } }; export const down = async ({ context: { queryInterface } }) => { @@ -82,7 +111,6 @@ export const down = async ({ context: { queryInterface } }) => { DROP TRIGGER IF EXISTS before_insert_paranet_synced_asset; `); - // Delete the before-update trigger await queryInterface.sequelize.query(` DROP TRIGGER IF EXISTS before_update_paranet_synced_asset; `); From 423723efad1e5c7b02837d0ff186b18a82e17026 Mon Sep 17 00:00:00 2001 From: Zvonimir Date: Mon, 30 Dec 2024 11:09:49 +0100 Subject: [PATCH 04/27] Add main dir variable --- v8-data-migration/constants.js | 13 +++++++------ v8-data-migration/run-data-migration.sh | 6 ++++-- 2 files changed, 11 insertions(+), 8 deletions(-) diff --git a/v8-data-migration/constants.js b/v8-data-migration/constants.js index 53f84f4ef..32b10c37f 100644 --- a/v8-data-migration/constants.js +++ b/v8-data-migration/constants.js @@ -20,12 +20,13 @@ export const VISIBILITY = { }; export const BATCH_SIZE = 50; -export const DEFAULT_CONFIG_PATH = '/root/ot-node/current/config/config.json'; -export const NODERC_CONFIG_PATH = '/root/ot-node/.origintrail_noderc'; -export const DATA_MIGRATION_DIR = '/root/ot-node/data/data-migration'; -export const LOG_DIR = '/root/ot-node/data/data-migration/logs'; -export const ENV_PATH = '/root/ot-node/current/.env'; -export const MIGRATION_DIR = '/root/ot-node/data/migrations/'; +export const MAIN_DIR = '/root'; +export const DEFAULT_CONFIG_PATH = `${MAIN_DIR}/ot-node/current/config/config.json`; +export const NODERC_CONFIG_PATH = `${MAIN_DIR}/ot-node/.origintrail_noderc`; +export const DATA_MIGRATION_DIR = `${MAIN_DIR}/ot-node/data/data-migration`; +export const LOG_DIR = `${MAIN_DIR}/ot-node/data/data-migration/logs`; +export const ENV_PATH = `${MAIN_DIR}/ot-node/current/.env`; +export const MIGRATION_DIR = `${MAIN_DIR}/ot-node/data/migrations/`; export const MIGRATION_PROGRESS_FILE = 'v8DataMigration'; export const DB_URLS = { diff --git a/v8-data-migration/run-data-migration.sh b/v8-data-migration/run-data-migration.sh index 3b8d1c243..9e46cab41 100644 --- a/v8-data-migration/run-data-migration.sh +++ b/v8-data-migration/run-data-migration.sh @@ -1,3 +1,5 @@ -cd /root/ot-node/current/v8-data-migration/ && +MAIN_DIR=/root + +cd $MAIN_DIR/ot-node/current/v8-data-migration/ && npm rebuild sqlite3 && -nohup node v8-data-migration.js > /root/ot-node/data/nohup.out 2>&1 & \ No newline at end of file +nohup node v8-data-migration.js > $MAIN_DIR/ot-node/data/nohup.out 2>&1 & \ No newline at end of file From 8ad352181bbae97609a5a3dc7522669d370d1d96 Mon Sep 17 00:00:00 2001 From: Zvonimir Date: Mon, 30 Dec 2024 11:18:44 +0100 Subject: [PATCH 05/27] Add redownloads if db download fails or its integrity is compromised --- v8-data-migration/sqlite-utils.js | 17 +++ v8-data-migration/v8-data-migration-utils.js | 14 ++ v8-data-migration/v8-data-migration.js | 140 ++++++++++++------- 3 files changed, 117 insertions(+), 54 deletions(-) diff --git a/v8-data-migration/sqlite-utils.js b/v8-data-migration/sqlite-utils.js index 3bc682fb2..c682d6a11 
100644 --- a/v8-data-migration/sqlite-utils.js +++ b/v8-data-migration/sqlite-utils.js @@ -27,6 +27,23 @@ export class SqliteDatabase { } } + async checkIntegrity() { + this._validateConnection(); + + try { + const result = await this.db.get('PRAGMA integrity check'); + if (result.integrity_check === 'ok') { + logger.info('Database integrity check passed.'); + return true; + } + logger.error('Database integrity check failed:', result.integrity_check); + return false; + } catch (error) { + logger.error('Error during integrity check:', error.message); + return false; + } + } + async getTableExists(blockchainName) { this._validateConnection(); this._validateBlockchainName(blockchainName); diff --git a/v8-data-migration/v8-data-migration-utils.js b/v8-data-migration/v8-data-migration-utils.js index 2a8f1731c..ff0e97155 100644 --- a/v8-data-migration/v8-data-migration-utils.js +++ b/v8-data-migration/v8-data-migration-utils.js @@ -73,3 +73,17 @@ export function markMigrationAsSuccessfull() { // close file fs.closeSync(file); } + +export function deleteFile(filePath) { + if (fs.existsSync(filePath)) { + fs.unlinkSync(filePath); + logger.info(`Deleted file: ${filePath}`); + + if (fs.existsSync(filePath)) { + logger.error(`File: ${filePath} still exists after deletion.`); + process.exit(1); + } + } else { + logger.info(`Did not delete file: ${filePath} because it does not exist.`); + } +} diff --git a/v8-data-migration/v8-data-migration.js b/v8-data-migration/v8-data-migration.js index 7dff4b6b4..845a1342a 100644 --- a/v8-data-migration/v8-data-migration.js +++ b/v8-data-migration/v8-data-migration.js @@ -10,6 +10,7 @@ import { ensureDirectoryExists, ensureMigrationProgressFileExists, markMigrationAsSuccessfull, + deleteFile, } from './v8-data-migration-utils.js'; import { getAssertionFromV6TripleStore, @@ -263,6 +264,69 @@ async function getAssertionsInBatch( return v6Assertions; } +async function downloadDb(dbFilePath) { + logger.time(`Database file downloading time`); + const maxAttempts = 3; + for (let i = 0; i < maxAttempts; i += 1) { + // Fetch the db file from the remote server + logger.info( + `Fetching ${process.env.NODE_ENV}.db file from ${DB_URLS[process.env.NODE_ENV]}. Try ${ + i + 1 + } of 3. This may take a while...`, + ); + + try { + const writer = fs.createWriteStream(dbFilePath); + const response = await axios({ + url: DB_URLS[process.env.NODE_ENV], + method: 'GET', + responseType: 'stream', + }); + + // Pipe the response stream to the file + response.data.pipe(writer); + + await new Promise((resolve, reject) => { + let downloadComplete = false; + + response.data.on('end', () => { + downloadComplete = true; + }); + + writer.on('finish', resolve); + writer.on('error', (err) => + reject(new Error(`Write stream error: ${err.message}`)), + ); + response.data.on('error', (err) => + reject(new Error(`Download stream error: ${err.message}`)), + ); + response.data.on('close', () => { + if (!downloadComplete) { + reject(new Error('Download stream closed before completing')); + } + }); + }); + if (fs.existsSync(dbFilePath)) { + logger.info(`DB file downloaded successfully`); + break; + } + logger.error(`DB file for ${process.env.NODE_ENV} is not present after download.`); + } catch (error) { + logger.error(`Error downloading DB file: ${error.message}`); + } + + logger.info('Deleting downloaded db file to prevent data corruption'); + deleteFile(dbFilePath); + + if (i === maxAttempts - 1) { + logger.error('Max db download attempts reached. 
Terminating process...'); + process.exit(1); + } + logger.info(`Retrying db download...`); + } + logger.timeEnd(`Database file downloading time`); +} + async function main() { ensureMigrationProgressFileExists(); @@ -318,66 +382,34 @@ async function main() { // Ensure connections await ensureConnections(tripleStoreRepositories, tripleStoreImplementation); - // Check if db exists and if it doesn't download it to the relevant directory - const dbFilePath = path.join(DATA_MIGRATION_DIR, `${process.env.NODE_ENV}.db`); - if (!fs.existsSync(dbFilePath)) { - logger.info( - `DB file for ${process.env.NODE_ENV} does not exist in ${DATA_MIGRATION_DIR}. Downloading it...`, - ); - // Fetch the db file from the remote server - logger.info( - `Fetching ${process.env.NODE_ENV}.db file from ${ - DB_URLS[process.env.NODE_ENV] - }. This may take a while...`, - ); - logger.time(`Database file downloading time`); - const writer = fs.createWriteStream(dbFilePath); - const response = await axios({ - url: DB_URLS[process.env.NODE_ENV], - method: 'GET', - responseType: 'stream', - }); - - // Pipe the response stream to the file - response.data.pipe(writer); - // Wait for the file to finish downloading - try { - await new Promise((resolve, reject) => { - let downloadComplete = false; + const maxAttempts = 2; + for (let i = 0; i < maxAttempts; i += 1) { + // Check if db exists and if it doesn't download it to the relevant directory + const dbFilePath = path.join(DATA_MIGRATION_DIR, `${process.env.NODE_ENV}.db`); + if (!fs.existsSync(dbFilePath)) { + logger.info( + `DB file for ${process.env.NODE_ENV} does not exist in ${DATA_MIGRATION_DIR}. Downloading it...`, + ); + await downloadDb(dbFilePath); + } - response.data.on('end', () => { - downloadComplete = true; - }); + logger.info('Initializing SQLite database'); + await sqliteDb.initialize(); - writer.on('finish', resolve); - writer.on('error', (err) => - reject(new Error(`Write stream error: ${err.message}`)), - ); - response.data.on('error', (err) => - reject(new Error(`Download stream error: ${err.message}`)), - ); - response.data.on('close', () => { - if (!downloadComplete) { - reject(new Error('Download stream closed before completing')); - } - }); - }); - } catch (error) { - logger.error(`Critical error during download: ${error.message}`); - logger.error('Terminating process to prevent data corruption'); - process.exit(1); - } - logger.timeEnd(`Database file downloading time`); + // Check if db is corrupted and handle accordingly + const integrityCheck = await sqliteDb.checkIntegrity(); + if (!integrityCheck) { + logger.info('Db integrity check failed. Deleting corrupt db file.'); + deleteFile(dbFilePath); - if (!fs.existsSync(dbFilePath)) { - throw new Error(`DB file for ${process.env.NODE_ENV} could not be created.`); + if (i === maxAttempts - 1) { + logger.error('Db integrity check failed. 
Terminating process...'); + process.exit(1); + } + logger.info(`Retrying db download and integrity check...`); } } - // Initialize SQLite database once before processing blockchains - logger.info('Initializing SQLite database'); - await sqliteDb.initialize(); - try { // make sure blockchains are always migrated in this order - base, gnosis, neuroweb const sortedBlockchains = Object.keys(blockchainConfig.implementation).sort(); From 5967efc7260808e3ab02bc0e16a7210a02761d21 Mon Sep 17 00:00:00 2001 From: Zvonimir Date: Mon, 30 Dec 2024 11:53:56 +0100 Subject: [PATCH 06/27] fix integrity check typo --- v8-data-migration/sqlite-utils.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/v8-data-migration/sqlite-utils.js b/v8-data-migration/sqlite-utils.js index c682d6a11..d710a4c77 100644 --- a/v8-data-migration/sqlite-utils.js +++ b/v8-data-migration/sqlite-utils.js @@ -31,7 +31,7 @@ export class SqliteDatabase { this._validateConnection(); try { - const result = await this.db.get('PRAGMA integrity check'); + const result = await this.db.get('PRAGMA integrity_check;'); if (result.integrity_check === 'ok') { logger.info('Database integrity check passed.'); return true; From 5e80bb6bd566986390e78ab2e55ef2a2c1d6c242 Mon Sep 17 00:00:00 2001 From: Mihajlo Pavlovic Date: Mon, 30 Dec 2024 12:00:10 +0100 Subject: [PATCH 07/27] Fix add indexes to tables migration --- .../20241105160000-add-indexes-to-tables.js | 127 +++++++++--------- 1 file changed, 61 insertions(+), 66 deletions(-) diff --git a/src/modules/repository/implementation/sequelize/migrations/20241105160000-add-indexes-to-tables.js b/src/modules/repository/implementation/sequelize/migrations/20241105160000-add-indexes-to-tables.js index 047788728..249128004 100644 --- a/src/modules/repository/implementation/sequelize/migrations/20241105160000-add-indexes-to-tables.js +++ b/src/modules/repository/implementation/sequelize/migrations/20241105160000-add-indexes-to-tables.js @@ -74,79 +74,74 @@ export async function up({ context: { queryInterface } }) { const { table, column, name } = index; // eslint-disable-next-line no-await-in-loop - const [results] = await queryInterface.sequelize.query(` - SELECT COUNT(1) AS count - FROM INFORMATION_SCHEMA.STATISTICS - WHERE TABLE_SCHEMA = DATABASE() AND TABLE_NAME = '${table}' AND INDEX_NAME = '${name}'; + const [indexExists] = await queryInterface.sequelize.query(` + SELECT COUNT(*) AS index_exists + FROM information_schema.statistics + WHERE table_schema = DATABASE() + AND table_name = '${table}' + AND index_name = '${name}'; `); - - if (results[0].count === 0) { + if (indexExists[0].index_exists === 0) { // eslint-disable-next-line no-await-in-loop - await queryInterface.addIndex(table, column, { name }); + await queryInterface.sequelize.query(` + CREATE INDEX \`${name}\` + ON \`${table}\` (${column.map((col) => `\`${col}\``).join(', ')}); + `); } } } export async function down({ context: { queryInterface } }) { - await queryInterface.removeIndex('shard', 'shard_blockchain_id_index'); - - await queryInterface.removeIndex('shard', 'last_dialed_index'); - - await queryInterface.removeIndex('paranet_synced_asset', 'paranet_synced_asset_ual_index'); - - await queryInterface.removeIndex('paranet_synced_asset', 'paranet_ual_data_source_index'); - - await queryInterface.removeIndex('paranet', 'blockchain_id_paranet_id_index'); - - await queryInterface.removeIndex('missed_paranet_asset', 'paranet_ual_index'); - - await queryInterface.removeIndex('missed_paranet_asset', 
'missed_paranet_asset_ual_index'); - - await queryInterface.removeIndex('event', 'name_timestamp_index'); - - await queryInterface.removeIndex('event', 'event_operation_id_index'); - - await queryInterface.removeIndex('commands', 'name_status_index'); - - await queryInterface.removeIndex('commands', 'status_started_at_index'); - - await queryInterface.removeIndex('get', 'get_operation_id_index'); - - await queryInterface.removeIndex('publish', 'publish_operation_id_index'); - - await queryInterface.removeIndex('update', 'update_operation_id_index'); - - await queryInterface.removeIndex('publish_paranet', 'publish_paranet_operation_id_index'); - - await queryInterface.removeIndex('get', 'get_created_at_index'); - - await queryInterface.removeIndex('publish', 'publish_created_at_index'); - - await queryInterface.removeIndex('update', 'update_created_at_index'); - - await queryInterface.removeIndex('publish_paranet', 'publish_paranet_created_at_index'); - - await queryInterface.removeIndex('get_response', 'get_response_operation_id_index'); - - await queryInterface.removeIndex('publish_response', 'publish_response_operation_id_index'); - - await queryInterface.removeIndex('update_response', 'update_response_operation_id_index'); - - await queryInterface.removeIndex( - 'publish_paranet_response', - 'publish_paranet_response_operation_id_index', - ); - - await queryInterface.removeIndex('get_response', 'get_response_created_at_index'); - - await queryInterface.removeIndex('publish_response', 'publish_response_created_at_index'); - - await queryInterface.removeIndex('update_response', 'update_response_created_at_index'); + const indexes = [ + { table: 'shard', name: 'shard_blockchain_id_index' }, + { table: 'shard', name: 'last_dialed_index' }, + { table: 'paranet_synced_asset', name: 'paranet_synced_asset_ual_index' }, + { table: 'paranet_synced_asset', name: 'paranet_ual_data_source_index' }, + { table: 'paranet', name: 'blockchain_id_paranet_id_index' }, + { table: 'missed_paranet_asset', name: 'paranet_ual_index' }, + { table: 'missed_paranet_asset', name: 'missed_paranet_asset_ual_index' }, + { table: 'event', name: 'name_timestamp_index' }, + { table: 'event', name: 'event_operation_id_index' }, + { table: 'commands', name: 'name_status_index' }, + { table: 'commands', name: 'status_started_at_index' }, + { table: 'get', name: 'get_operation_id_index' }, + { table: 'publish', name: 'publish_operation_id_index' }, + { table: 'update', name: 'update_operation_id_index' }, + { table: 'publish_paranet', name: 'publish_paranet_operation_id_index' }, + { table: 'get', name: 'get_created_at_index' }, + { table: 'publish', name: 'publish_created_at_index' }, + { table: 'update', name: 'update_created_at_index' }, + { table: 'publish_paranet', name: 'publish_paranet_created_at_index' }, + { table: 'get_response', name: 'get_response_operation_id_index' }, + { table: 'publish_response', name: 'publish_response_operation_id_index' }, + { table: 'update_response', name: 'update_response_operation_id_index' }, + { + table: 'publish_paranet_response', + name: 'publish_paranet_response_operation_id_index', + }, + { table: 'get_response', name: 'get_response_created_at_index' }, + { table: 'publish_response', name: 'publish_response_created_at_index' }, + { table: 'update_response', name: 'update_response_created_at_index' }, + { + table: 'publish_paranet_response', + name: 'publish_paranet_response_created_at_index', + }, + { table: 'blockchain', name: 'contract_index' }, + ]; - await 
queryInterface.removeIndex(
-        'publish_paranet_response',
-        'publish_paranet_response_created_at_index',
-    );
+    for (const { table, name } of indexes) {
+        // eslint-disable-next-line no-await-in-loop
+        const [indexExists] = await queryInterface.sequelize.query(`
+            SELECT COUNT(*) AS index_exists
+            FROM information_schema.statistics
+            WHERE table_schema = DATABASE()
+            AND table_name = '${table}'
+            AND index_name = '${name}';
+        `);

-    await queryInterface.removeIndex('blockchain', 'contract_index');
+        if (indexExists[0].index_exists > 0) {
+            // eslint-disable-next-line no-await-in-loop
+            await queryInterface.removeIndex(table, name);
+        }
+    }
 }

From 6951974b9bbff5d5047827739a248d0bfc800bfa Mon Sep 17 00:00:00 2001
From: Mihajlo Pavlovic
Date: Mon, 30 Dec 2024 12:12:00 +0100
Subject: [PATCH 08/27] Fix rename keyword column to datasetroot in response tables

---
 ...word-column-to-datasetroot-in-responses.js | 29 ++++++++++++++++---
 1 file changed, 25 insertions(+), 4 deletions(-)

diff --git a/src/modules/repository/implementation/sequelize/migrations/20241125151200-rename-keyword-column-to-datasetroot-in-responses.js b/src/modules/repository/implementation/sequelize/migrations/20241125151200-rename-keyword-column-to-datasetroot-in-responses.js
index 0ac4d26f1..7f8046cbf 100644
--- a/src/modules/repository/implementation/sequelize/migrations/20241125151200-rename-keyword-column-to-datasetroot-in-responses.js
+++ b/src/modules/repository/implementation/sequelize/migrations/20241125151200-rename-keyword-column-to-datasetroot-in-responses.js
@@ -1,9 +1,30 @@
 export async function up({ context: { queryInterface } }) {
-    await queryInterface.renameColumn('publish_response', 'keyword', 'dataset_root');
-    await queryInterface.renameColumn('get_response', 'keyword', 'dataset_root');
+    // Helper function to check if a column exists
+    async function columnExists(table, column) {
+        const tableDescription = await queryInterface.describeTable(table);
+        return Object.prototype.hasOwnProperty.call(tableDescription, column);
+    }
+
+    if (await columnExists('publish_response', 'keyword')) {
+        await queryInterface.renameColumn('publish_response', 'keyword', 'dataset_root');
+    }
+
+    if (await columnExists('get_response', 'keyword')) {
+        await queryInterface.renameColumn('get_response', 'keyword', 'dataset_root');
+    }
 }

 export async function down({ context: { queryInterface } }) {
-    await queryInterface.renameColumn('publish_response', 'dataset_root', 'keyword');
-    await queryInterface.renameColumn('get_response', 'dataset_root', 'keyword');
+    async function columnExists(table, column) {
+        const tableDescription = await queryInterface.describeTable(table);
+        return Object.prototype.hasOwnProperty.call(tableDescription, column);
+    }
+
+    if (await columnExists('publish_response', 'dataset_root')) {
+        await queryInterface.renameColumn('publish_response', 'dataset_root', 'keyword');
+    }
+
+    if (await columnExists('get_response', 'dataset_root')) {
+        await queryInterface.renameColumn('get_response', 'dataset_root', 'keyword');
+    }
 }

From 7f2789fd8acb071b4a41510266b92ed24a683d0f Mon Sep 17 00:00:00 2001
From: Mihajlo Pavlovic
Date: Mon, 30 Dec 2024 12:15:11 +0100
Subject: [PATCH 09/27] Fix add commands priority migration

---
 .../20241126114400-add-commands-priority.js | 22 +++++++++++++++----
 1 file changed, 18 insertions(+), 4 deletions(-)

diff --git a/src/modules/repository/implementation/sequelize/migrations/20241126114400-add-commands-priority.js 
b/src/modules/repository/implementation/sequelize/migrations/20241126114400-add-commands-priority.js index 09e969fb8..d397c5235 100644 --- a/src/modules/repository/implementation/sequelize/migrations/20241126114400-add-commands-priority.js +++ b/src/modules/repository/implementation/sequelize/migrations/20241126114400-add-commands-priority.js @@ -1,9 +1,23 @@ export async function up({ context: { queryInterface, Sequelize } }) { - await queryInterface.addColumn('commands', 'priority', { - type: Sequelize.BIGINT, - }); + async function columnExists(table, column) { + const tableDescription = await queryInterface.describeTable(table); + return Object.prototype.hasOwnProperty.call(tableDescription, column); + } + + if (!(await columnExists('commands', 'priority'))) { + await queryInterface.addColumn('commands', 'priority', { + type: Sequelize.BIGINT, + }); + } } export async function down({ context: { queryInterface } }) { - await queryInterface.removeColumn('commands', 'priority'); + async function columnExists(table, column) { + const tableDescription = await queryInterface.describeTable(table); + return Object.prototype.hasOwnProperty.call(tableDescription, column); + } + + if (await columnExists('commands', 'priority')) { + await queryInterface.removeColumn('commands', 'priority'); + } } From 19c558a733c759019a54a23123611b7d26ec552a Mon Sep 17 00:00:00 2001 From: Mihajlo Pavlovic Date: Mon, 30 Dec 2024 12:18:20 +0100 Subject: [PATCH 10/27] Fix add commands is blocking migration --- ...20241129120000-add-commands-is_blocking.js | 22 +++++++++++++++---- 1 file changed, 18 insertions(+), 4 deletions(-) diff --git a/src/modules/repository/implementation/sequelize/migrations/20241129120000-add-commands-is_blocking.js b/src/modules/repository/implementation/sequelize/migrations/20241129120000-add-commands-is_blocking.js index 16c78763b..7b47ca2cc 100644 --- a/src/modules/repository/implementation/sequelize/migrations/20241129120000-add-commands-is_blocking.js +++ b/src/modules/repository/implementation/sequelize/migrations/20241129120000-add-commands-is_blocking.js @@ -1,9 +1,23 @@ export async function up({ context: { queryInterface, Sequelize } }) { - await queryInterface.addColumn('commands', 'is_blocking', { - type: Sequelize.BOOLEAN, - }); + async function columnExists(table, column) { + const tableDescription = await queryInterface.describeTable(table); + return Object.prototype.hasOwnProperty.call(tableDescription, column); + } + + if (!(await columnExists('commands', 'is_blocking'))) { + await queryInterface.addColumn('commands', 'is_blocking', { + type: Sequelize.BOOLEAN, + }); + } } export async function down({ context: { queryInterface } }) { - await queryInterface.removeColumn('commands', 'is_blocking'); + async function columnExists(table, column) { + const tableDescription = await queryInterface.describeTable(table); + return Object.prototype.hasOwnProperty.call(tableDescription, column); + } + + if (await columnExists('commands', 'is_blocking')) { + await queryInterface.removeColumn('commands', 'is_blocking'); + } } From 86f8e0e01c90714cc941edab08f0810ce647c319 Mon Sep 17 00:00:00 2001 From: Mihajlo Pavlovic Date: Mon, 30 Dec 2024 12:19:40 +0100 Subject: [PATCH 11/27] Fix remove dataset root response table migration --- ...25800-remove-datasetroot-response-table.js | 40 ++++++++++++++----- 1 file changed, 30 insertions(+), 10 deletions(-) diff --git a/src/modules/repository/implementation/sequelize/migrations/20241129125800-remove-datasetroot-response-table.js 
b/src/modules/repository/implementation/sequelize/migrations/20241129125800-remove-datasetroot-response-table.js index d80ed68fa..c667a26a5 100644 --- a/src/modules/repository/implementation/sequelize/migrations/20241129125800-remove-datasetroot-response-table.js +++ b/src/modules/repository/implementation/sequelize/migrations/20241129125800-remove-datasetroot-response-table.js @@ -1,15 +1,35 @@ export async function up({ context: { queryInterface } }) { - await queryInterface.removeColumn('publish_response', 'dataset_root'); - await queryInterface.removeColumn('get_response', 'dataset_root'); + async function columnExists(table, column) { + const tableDescription = await queryInterface.describeTable(table); + return Object.prototype.hasOwnProperty.call(tableDescription, column); + } + + if (await columnExists('publish_response', 'dataset_root')) { + await queryInterface.removeColumn('publish_response', 'dataset_root'); + } + + if (await columnExists('get_response', 'dataset_root')) { + await queryInterface.removeColumn('get_response', 'dataset_root'); + } } export async function down({ context: { queryInterface, Sequelize } }) { - await queryInterface.addColumn('publish_response', 'dataset_root', { - type: Sequelize.STRING, - allowNull: false, - }); - await queryInterface.addColumn('get_response', 'dataset_root', { - type: Sequelize.STRING, - allowNull: false, - }); + async function columnExists(table, column) { + const tableDescription = await queryInterface.describeTable(table); + return Object.prototype.hasOwnProperty.call(tableDescription, column); + } + + if (!(await columnExists('publish_response', 'dataset_root'))) { + await queryInterface.addColumn('publish_response', 'dataset_root', { + type: Sequelize.STRING, + allowNull: false, + }); + } + + if (!(await columnExists('get_response', 'dataset_root'))) { + await queryInterface.addColumn('get_response', 'dataset_root', { + type: Sequelize.STRING, + allowNull: false, + }); + } } From 0ca8deb0b27cc9058ea81268c3f123a484335d4d Mon Sep 17 00:00:00 2001 From: Mihajlo Pavlovic Date: Mon, 30 Dec 2024 12:22:09 +0100 Subject: [PATCH 12/27] Fix update blockchain events migration --- ...20241201152000-update-blockchain-events.js | 77 +++++++++++++------ 1 file changed, 55 insertions(+), 22 deletions(-) diff --git a/src/modules/repository/implementation/sequelize/migrations/20241201152000-update-blockchain-events.js b/src/modules/repository/implementation/sequelize/migrations/20241201152000-update-blockchain-events.js index d4909d2ca..4109dd20b 100644 --- a/src/modules/repository/implementation/sequelize/migrations/20241201152000-update-blockchain-events.js +++ b/src/modules/repository/implementation/sequelize/migrations/20241201152000-update-blockchain-events.js @@ -1,37 +1,70 @@ export async function up({ context: { queryInterface, Sequelize } }) { - await queryInterface.renameColumn('blockchain_event', 'blockchain_id', 'blockchain'); + // Helper function to check if a column exists + async function columnExists(table, column) { + const tableDescription = await queryInterface.describeTable(table); + return Object.prototype.hasOwnProperty.call(tableDescription, column); + } - await queryInterface.changeColumn('blockchain_event', 'block', { - type: Sequelize.BIGINT, - }); + if (await columnExists('blockchain_event', 'blockchain_id')) { + await queryInterface.renameColumn('blockchain_event', 'blockchain_id', 'blockchain'); + } - await queryInterface.renameColumn('blockchain_event', 'block', 'block_number'); + if (await 
columnExists('blockchain_event', 'block')) { + await queryInterface.changeColumn('blockchain_event', 'block', { + type: Sequelize.BIGINT, + }); - await queryInterface.addColumn('blockchain_event', 'transaction_index', { - type: Sequelize.BIGINT, - }); + await queryInterface.renameColumn('blockchain_event', 'block', 'block_number'); + } - await queryInterface.addColumn('blockchain_event', 'log_index', { - type: Sequelize.BIGINT, - }); + if (!(await columnExists('blockchain_event', 'transaction_index'))) { + await queryInterface.addColumn('blockchain_event', 'transaction_index', { + type: Sequelize.BIGINT, + }); + } - await queryInterface.addColumn('blockchain_event', 'contract_address', { - type: Sequelize.STRING, - }); + if (!(await columnExists('blockchain_event', 'log_index'))) { + await queryInterface.addColumn('blockchain_event', 'log_index', { + type: Sequelize.BIGINT, + }); + } + + if (!(await columnExists('blockchain_event', 'contract_address'))) { + await queryInterface.addColumn('blockchain_event', 'contract_address', { + type: Sequelize.STRING, + }); + } } export async function down({ context: { queryInterface, Sequelize } }) { - await queryInterface.renameColumn('blockchain_event', 'block_number', 'block'); + async function columnExists(table, column) { + const tableDescription = await queryInterface.describeTable(table); + return Object.prototype.hasOwnProperty.call(tableDescription, column); + } + + if (await columnExists('blockchain_event', 'block_number')) { + await queryInterface.renameColumn('blockchain_event', 'block_number', 'block'); + } - await queryInterface.changeColumn('blockchain_event', 'block', { - type: Sequelize.INTEGER, - }); + if (await columnExists('blockchain_event', 'block')) { + await queryInterface.changeColumn('blockchain_event', 'block', { + type: Sequelize.INTEGER, + }); + } - await queryInterface.renameColumn('blockchain_event', 'blockchain', 'blockchain_id'); + if (await columnExists('blockchain_event', 'blockchain')) { + await queryInterface.renameColumn('blockchain_event', 'blockchain', 'blockchain_id'); + } - await queryInterface.removeColumn('blockchain_event', 'transaction_index'); + if (await columnExists('blockchain_event', 'transaction_index')) { + await queryInterface.removeColumn('blockchain_event', 'transaction_index'); + } - await queryInterface.removeColumn('blockchain_event', 'log_index'); + if (await columnExists('blockchain_event', 'log_index')) { + await queryInterface.removeColumn('blockchain_event', 'log_index'); + } - await queryInterface.removeColumn('blockchain_event', 'contract_address'); + if (await columnExists('blockchain_event', 'contract_address')) { + await queryInterface.removeColumn('blockchain_event', 'contract_address'); + } } From 17ad6fab241a4061f1474d1b7545a381a2007341 Mon Sep 17 00:00:00 2001 From: Zvonimir Date: Mon, 30 Dec 2024 12:35:04 +0100 Subject: [PATCH 13/27] add db close if integrity check fails --- v8-data-migration/v8-data-migration.js | 1 + 1 file changed, 1 insertion(+) diff --git a/v8-data-migration/v8-data-migration.js b/v8-data-migration/v8-data-migration.js index 845a1342a..5ac38ba30 100644 --- a/v8-data-migration/v8-data-migration.js +++ b/v8-data-migration/v8-data-migration.js @@ -399,6 +399,7 @@ async function main() { // Check if db is corrupted and handle accordingly const integrityCheck = await sqliteDb.checkIntegrity(); if (!integrityCheck) { + await sqliteDb.close(); logger.info('Db integrity check failed. 
Deleting corrupt db file.'); deleteFile(dbFilePath); From bb21b8e8b8ad2fb004938138a8de5f28cf0434a4 Mon Sep 17 00:00:00 2001 From: Mihajlo Pavlovic Date: Mon, 30 Dec 2024 12:44:46 +0100 Subject: [PATCH 14/27] Fix rename ask migration --- .../migrations/20241211204400-rename-ask.js | 38 +++++++++++++++++-- 1 file changed, 34 insertions(+), 4 deletions(-) diff --git a/src/modules/repository/implementation/sequelize/migrations/20241211204400-rename-ask.js b/src/modules/repository/implementation/sequelize/migrations/20241211204400-rename-ask.js index 954f87a84..801966fa1 100644 --- a/src/modules/repository/implementation/sequelize/migrations/20241211204400-rename-ask.js +++ b/src/modules/repository/implementation/sequelize/migrations/20241211204400-rename-ask.js @@ -1,9 +1,39 @@ export async function up({ context: { queryInterface } }) { - await queryInterface.renameTable('finality', 'ask'); - await queryInterface.renameTable('finality_response', 'ask_response'); + async function tableExists(table) { + const [results] = await queryInterface.sequelize.query(` + SELECT COUNT(*) AS table_exists + FROM information_schema.tables + WHERE table_schema = DATABASE() + AND table_name = '${table}'; + `); + return results[0].table_exists > 0; + } + + if (await tableExists('finality')) { + await queryInterface.renameTable('finality', 'ask'); + } + + if (await tableExists('finality_response')) { + await queryInterface.renameTable('finality_response', 'ask_response'); + } } export async function down({ context: { queryInterface } }) { - await queryInterface.renameTable('ask', 'finality'); - await queryInterface.renameTable('ask_response', 'finality_response'); + async function tableExists(table) { + const [results] = await queryInterface.sequelize.query(` + SELECT COUNT(*) AS table_exists + FROM information_schema.tables + WHERE table_schema = DATABASE() + AND table_name = '${table}'; + `); + return results[0].table_exists > 0; + } + + if (await tableExists('ask')) { + await queryInterface.renameTable('ask', 'finality'); + } + + if (await tableExists('ask_response')) { + await queryInterface.renameTable('ask_response', 'finality_response'); + } } From 2f70f82b8351b0424b92514057cedd0d7fb6fb94 Mon Sep 17 00:00:00 2001 From: Zvonimir Date: Mon, 30 Dec 2024 13:10:47 +0100 Subject: [PATCH 15/27] Exit loop when db is valid --- v8-data-migration/v8-data-migration.js | 2 ++ 1 file changed, 2 insertions(+) diff --git a/v8-data-migration/v8-data-migration.js b/v8-data-migration/v8-data-migration.js index 5ac38ba30..82437c6fb 100644 --- a/v8-data-migration/v8-data-migration.js +++ b/v8-data-migration/v8-data-migration.js @@ -408,7 +408,9 @@ async function main() { process.exit(1); } logger.info(`Retrying db download and integrity check...`); + continue; } + break; } try { From ae890e8e7de8004c355d673dd92b2f2158101af9 Mon Sep 17 00:00:00 2001 From: Mihajlo Pavlovic Date: Mon, 30 Dec 2024 13:22:08 +0100 Subject: [PATCH 16/27] Dont store gateway publisher signature --- .../local-store/local-store-command.js | 65 ++++++++++++------- .../protocols/common/find-shard-command.js | 1 + .../sender/publish-find-shard-command.js | 2 +- 3 files changed, 42 insertions(+), 26 deletions(-) diff --git a/src/commands/local-store/local-store-command.js b/src/commands/local-store/local-store-command.js index 37f2bd2a1..0bd10951e 100644 --- a/src/commands/local-store/local-store-command.js +++ b/src/commands/local-store/local-store-command.js @@ -40,6 +40,7 @@ class LocalStoreCommand extends Command { tokenId, 
minimumNumberOfNodeReplications, batchSize, + nodePartOfShard, } = command.data; try { @@ -191,21 +192,27 @@ class LocalStoreCommand extends Command { return Command.empty(); } - - const identityId = await this.blockchainModuleManager.getIdentityId(blockchain); - const { v, r, s, vs } = await this.signatureService.signMessage( - blockchain, - datasetRoot, - ); - await this.signatureService.addSignatureToStorage( - NETWORK_SIGNATURES_FOLDER, - operationId, - identityId, - v, - r, - s, - vs, - ); + let v; + let r; + let s; + let vs; + let identityId; + if (nodePartOfShard) { + identityId = await this.blockchainModuleManager.getIdentityId(blockchain); + ({ v, r, s, vs } = await this.signatureService.signMessage( + blockchain, + datasetRoot, + )); + await this.signatureService.addSignatureToStorage( + NETWORK_SIGNATURES_FOLDER, + operationId, + identityId, + v, + r, + s, + vs, + ); + } const { v: publisherNodeV, @@ -239,16 +246,24 @@ class LocalStoreCommand extends Command { batchSize: batchSizePar, minAckResponses, }; - - await this.operationService.processResponse( - { ...command, data: updatedData }, - OPERATION_REQUEST_STATUS.COMPLETED, - { - messageType: NETWORK_MESSAGE_TYPES.RESPONSES.ACK, - messageData: { identityId, v, r, s, vs }, - }, - null, - ); + if (nodePartOfShard) { + await this.operationService.processResponse( + { ...command, data: updatedData }, + OPERATION_REQUEST_STATUS.COMPLETED, + { + messageType: NETWORK_MESSAGE_TYPES.RESPONSES.ACK, + messageData: { identityId, v, r, s, vs }, + }, + null, + ); + } else { + await this.operationService.processResponse( + { ...command, data: updatedData }, + OPERATION_REQUEST_STATUS.FAILED, + {}, + 'Node is not part of the shard.', + ); + } } catch (e) { await this.handleError(operationId, blockchain, e.message, this.errorType, true); return Command.empty(); diff --git a/src/commands/protocols/common/find-shard-command.js b/src/commands/protocols/common/find-shard-command.js index 34a646216..8530e0314 100644 --- a/src/commands/protocols/common/find-shard-command.js +++ b/src/commands/protocols/common/find-shard-command.js @@ -114,6 +114,7 @@ class FindShardCommand extends Command { return this.continueSequence( { ...command.data, + nodePartOfShard, leftoverNodes: shardNodes, numberOfFoundNodes: shardNodes.length + (nodePartOfShard ? 
1 : 0),
             },

diff --git a/src/commands/protocols/publish/sender/publish-find-shard-command.js b/src/commands/protocols/publish/sender/publish-find-shard-command.js
index 1d9bff3af..300e45cbb 100644
--- a/src/commands/protocols/publish/sender/publish-find-shard-command.js
+++ b/src/commands/protocols/publish/sender/publish-find-shard-command.js
@@ -24,7 +24,7 @@ class PublishFindShardCommand extends FindShardCommand {
             sequence.push('publishValidateAssetCommand');
         }

-        if (nodePartOfShard && !commandData.isOperationV0) {
+        if (!commandData.isOperationV0) {
             sequence.push('localStoreCommand');
         } else {
             sequence.push('networkPublishCommand');

From 38671efd04feacfb8e664903531fd3c4ecf21c4b Mon Sep 17 00:00:00 2001
From: Mihajlo Pavlovic
Date: Mon, 30 Dec 2024 13:43:34 +0100
Subject: [PATCH 17/27] Fix local-store command

---
 src/commands/local-store/local-store-command.js | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/src/commands/local-store/local-store-command.js b/src/commands/local-store/local-store-command.js
index 0bd10951e..732fc4c4f 100644
--- a/src/commands/local-store/local-store-command.js
+++ b/src/commands/local-store/local-store-command.js
@@ -196,9 +196,8 @@ class LocalStoreCommand extends Command {
         let r;
         let s;
         let vs;
-        let identityId;
+        const identityId = await this.blockchainModuleManager.getIdentityId(blockchain);
         if (nodePartOfShard) {
-            identityId = await this.blockchainModuleManager.getIdentityId(blockchain);
             ({ v, r, s, vs } = await this.signatureService.signMessage(
                 blockchain,
                 datasetRoot,

From ebc47a268a69ac772e56fd80572624cb41ac2990 Mon Sep 17 00:00:00 2001
From: Mihajlo Pavlovic
Date: Mon, 30 Dec 2024 13:51:51 +0100
Subject: [PATCH 18/27] Save finality ACK only if node itself is part of the shard

---
 .../publish/read-cached-publish-data-command.js | 16 ++++++++++++----
 1 file changed, 12 insertions(+), 4 deletions(-)

diff --git a/src/commands/protocols/publish/read-cached-publish-data-command.js b/src/commands/protocols/publish/read-cached-publish-data-command.js
index 504a92bbc..d99b1b6e7 100644
--- a/src/commands/protocols/publish/read-cached-publish-data-command.js
+++ b/src/commands/protocols/publish/read-cached-publish-data-command.js
@@ -14,6 +14,7 @@ class ReadCachedPublishDataCommand extends Command {
         this.fileService = ctx.fileService;
         this.repositoryModuleManager = ctx.repositoryModuleManager;
         this.networkModuleManager = ctx.networkModuleManager;
+        this.shardingTableService = ctx.shardingTableService;
         this.errorType = ERROR_TYPE.STORE_ASSERTION_ERROR;
     }

@@ -43,11 +44,18 @@ class ReadCachedPublishDataCommand extends Command {
         const myPeerId = this.networkModuleManager.getPeerId().toB58String();
         if (cachedData.remotePeerId === myPeerId) {
-            await this.repositoryModuleManager.saveFinalityAck(
-                publishOperationId,
-                ual,
-                cachedData.remotePeerId,
+            const isHostNodePartOfShard = await this.shardingTableService.isNodePartOfShard(
+                blockchain,
+                myPeerId,
             );
+            // Save finality ACK only if the node itself is part of the shard
+            if (isHostNodePartOfShard) {
+                await this.repositoryModuleManager.saveFinalityAck(
+                    publishOperationId,
+                    ual,
+                    cachedData.remotePeerId,
+                );
+            }
         } else {
             command.sequence.push('findPublisherNodeCommand', 'networkFinalityCommand');
         }

From 9957b87c9ad5d424c4f0e26cd33f50f4849835ea Mon Sep 17 00:00:00 2001
From: Mihajlo Pavlovic
Date: Mon, 30 Dec 2024 14:10:44 +0100
Subject: [PATCH 19/27] Enable local get if node is not part of the shard

---
 src/commands/protocols/get/sender/get-find-shard-command.js | 6 +++---
 1 file changed, 3 
insertions(+), 3 deletions(-) diff --git a/src/commands/protocols/get/sender/get-find-shard-command.js b/src/commands/protocols/get/sender/get-find-shard-command.js index f3f0e5842..ed1ab7a74 100644 --- a/src/commands/protocols/get/sender/get-find-shard-command.js +++ b/src/commands/protocols/get/sender/get-find-shard-command.js @@ -21,9 +21,9 @@ class GetFindShardCommand extends FindShardCommand { // eslint-disable-next-line no-unused-vars getOperationCommandSequence(nodePartOfShard, commandData) { const sequence = []; - if (nodePartOfShard) { - sequence.push('localGetCommand'); - } + // if (nodePartOfShard) { + sequence.push('localGetCommand'); + // } sequence.push('networkGetCommand'); return sequence; From e2a166162f165eb0efe43ded2a2cb2c23241aa9a Mon Sep 17 00:00:00 2001 From: Mihajlo Pavlovic Date: Mon, 30 Dec 2024 14:13:34 +0100 Subject: [PATCH 20/27] Remove commented out code --- src/commands/protocols/get/sender/get-find-shard-command.js | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/src/commands/protocols/get/sender/get-find-shard-command.js b/src/commands/protocols/get/sender/get-find-shard-command.js index ed1ab7a74..7570fee48 100644 --- a/src/commands/protocols/get/sender/get-find-shard-command.js +++ b/src/commands/protocols/get/sender/get-find-shard-command.js @@ -21,10 +21,7 @@ class GetFindShardCommand extends FindShardCommand { // eslint-disable-next-line no-unused-vars getOperationCommandSequence(nodePartOfShard, commandData) { const sequence = []; - // if (nodePartOfShard) { - sequence.push('localGetCommand'); - // } - sequence.push('networkGetCommand'); + sequence.push('localGetCommand', 'networkGetCommand'); return sequence; } From c30e1acaf83fb5e11214d773f1def0d5f3f4fbde Mon Sep 17 00:00:00 2001 From: Mihajlo Pavlovic Date: Mon, 30 Dec 2024 15:03:14 +0100 Subject: [PATCH 21/27] Move schedule sending network finality message after collection assertion inserted in triple store --- .../read-cached-publish-data-command.js | 11 ------ .../publish/store-assertion-command.js | 34 +++++++++++++++---- 2 files changed, 27 insertions(+), 18 deletions(-) diff --git a/src/commands/protocols/publish/read-cached-publish-data-command.js b/src/commands/protocols/publish/read-cached-publish-data-command.js index 504a92bbc..e7117d0d9 100644 --- a/src/commands/protocols/publish/read-cached-publish-data-command.js +++ b/src/commands/protocols/publish/read-cached-publish-data-command.js @@ -41,17 +41,6 @@ class ReadCachedPublishDataCommand extends Command { const ual = this.ualService.deriveUAL(blockchain, contractAddress, id); - const myPeerId = this.networkModuleManager.getPeerId().toB58String(); - if (cachedData.remotePeerId === myPeerId) { - await this.repositoryModuleManager.saveFinalityAck( - publishOperationId, - ual, - cachedData.remotePeerId, - ); - } else { - command.sequence.push('findPublisherNodeCommand', 'networkFinalityCommand'); - } - return this.continueSequence( { operationId, diff --git a/src/commands/protocols/publish/store-assertion-command.js b/src/commands/protocols/publish/store-assertion-command.js index 2bc8e2b14..8f7298c0d 100644 --- a/src/commands/protocols/publish/store-assertion-command.js +++ b/src/commands/protocols/publish/store-assertion-command.js @@ -12,12 +12,21 @@ class StoreAssertionCommand extends Command { this.ualService = ctx.ualService; this.dataService = ctx.dataService; this.tripleStoreService = ctx.tripleStoreService; + this.networkModuleManager = ctx.networkModuleManager; + this.repositoryModuleManager = 
From c30e1acaf83fb5e11214d773f1def0d5f3f4fbde Mon Sep 17 00:00:00 2001
From: Mihajlo Pavlovic
Date: Mon, 30 Dec 2024 15:03:14 +0100
Subject: [PATCH 21/27] Move scheduling of the network finality message to
 after the collection assertion is inserted in the triple store

---
 .../read-cached-publish-data-command.js  | 11 ------
 .../publish/store-assertion-command.js    | 34 +++++++++++++++----
 2 files changed, 27 insertions(+), 18 deletions(-)

diff --git a/src/commands/protocols/publish/read-cached-publish-data-command.js b/src/commands/protocols/publish/read-cached-publish-data-command.js
index 504a92bbc..e7117d0d9 100644
--- a/src/commands/protocols/publish/read-cached-publish-data-command.js
+++ b/src/commands/protocols/publish/read-cached-publish-data-command.js
@@ -41,17 +41,6 @@ class ReadCachedPublishDataCommand extends Command {
 
         const ual = this.ualService.deriveUAL(blockchain, contractAddress, id);
 
-        const myPeerId = this.networkModuleManager.getPeerId().toB58String();
-        if (cachedData.remotePeerId === myPeerId) {
-            await this.repositoryModuleManager.saveFinalityAck(
-                publishOperationId,
-                ual,
-                cachedData.remotePeerId,
-            );
-        } else {
-            command.sequence.push('findPublisherNodeCommand', 'networkFinalityCommand');
-        }
-
         return this.continueSequence(
             {
                 operationId,
diff --git a/src/commands/protocols/publish/store-assertion-command.js b/src/commands/protocols/publish/store-assertion-command.js
index 2bc8e2b14..8f7298c0d 100644
--- a/src/commands/protocols/publish/store-assertion-command.js
+++ b/src/commands/protocols/publish/store-assertion-command.js
@@ -12,12 +12,21 @@ class StoreAssertionCommand extends Command {
         this.ualService = ctx.ualService;
         this.dataService = ctx.dataService;
         this.tripleStoreService = ctx.tripleStoreService;
+        this.networkModuleManager = ctx.networkModuleManager;
+        this.repositoryModuleManager = ctx.repositoryModuleManager;
 
         this.errorType = ERROR_TYPE.STORE_ASSERTION_ERROR;
     }
 
     async execute(command) {
-        const { operationId, ual, blockchain, assertion } = command.data;
+        const {
+            operationId,
+            ual,
+            blockchain,
+            assertion,
+            publishOperationId,
+            remotePeerId: publisherPeerId,
+        } = command.data;
 
         await this.operationIdService.updateOperationIdStatus(
             operationId,
@@ -26,17 +35,28 @@
         );
         try {
             await this._insertAssertion(assertion, ual);
+
+            await this.operationIdService.updateOperationIdStatus(
+                operationId,
+                blockchain,
+                OPERATION_ID_STATUS.PUBLISH_FINALIZATION.PUBLISH_FINALIZATION_STORE_ASSERTION_END,
+            );
+
+            const myPeerId = this.networkModuleManager.getPeerId().toB58String();
+            if (publisherPeerId === myPeerId) {
+                await this.repositoryModuleManager.saveFinalityAck(
+                    publishOperationId,
+                    ual,
+                    publisherPeerId,
+                );
+            } else {
+                command.sequence.push('findPublisherNodeCommand', 'networkFinalityCommand');
+            }
         } catch (e) {
             await this.handleError(operationId, blockchain, e.message, this.errorType, true);
             return Command.empty(); // TODO: Should it end here or do a retry?
         }
 
-        await this.operationIdService.updateOperationIdStatus(
-            operationId,
-            blockchain,
-            OPERATION_ID_STATUS.PUBLISH_FINALIZATION.PUBLISH_FINALIZATION_STORE_ASSERTION_END,
-        );
-
         return this.continueSequence(command.data, command.sequence);
     }
 

From abeeee4dc2e976f56b435a4440b0faaf9c7214a9 Mon Sep 17 00:00:00 2001
From: Mihajlo Pavlovic
Date: Mon, 30 Dec 2024 15:04:28 +0100
Subject: [PATCH 22/27] Remove unused dependencies in ReadCachedPublishDataCommand

---
 .../protocols/publish/read-cached-publish-data-command.js | 3 ---
 1 file changed, 3 deletions(-)

diff --git a/src/commands/protocols/publish/read-cached-publish-data-command.js b/src/commands/protocols/publish/read-cached-publish-data-command.js
index e7117d0d9..eef2696f2 100644
--- a/src/commands/protocols/publish/read-cached-publish-data-command.js
+++ b/src/commands/protocols/publish/read-cached-publish-data-command.js
@@ -10,10 +10,7 @@ class ReadCachedPublishDataCommand extends Command {
     constructor(ctx) {
         super(ctx);
         this.ualService = ctx.ualService;
-        this.dataService = ctx.dataService;
         this.fileService = ctx.fileService;
-        this.repositoryModuleManager = ctx.repositoryModuleManager;
-        this.networkModuleManager = ctx.networkModuleManager;
         this.errorType = ERROR_TYPE.STORE_ASSERTION_ERROR;
     }
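Note (editor's illustration, not part of the patch series): patch 21 moves the finality bookkeeping from ReadCachedPublishDataCommand into StoreAssertionCommand, so it only runs after the collection assertion has been written to the triple store. A condensed sketch of the resulting ordering; the helper name and the self parameter are hypothetical, while the calls mirror the diff:

    // Sketch: finality is acted on only once the assertion insert succeeds.
    async function storeAssertionAndFinalize(self, command) {
        const { ual, assertion, publishOperationId, remotePeerId: publisherPeerId } = command.data;
        await self._insertAssertion(assertion, ual); // a throw means no ACK and no network finality
        const myPeerId = self.networkModuleManager.getPeerId().toB58String();
        if (publisherPeerId === myPeerId) {
            // This node was the publisher: record the finality ACK locally.
            await self.repositoryModuleManager.saveFinalityAck(publishOperationId, ual, publisherPeerId);
        } else {
            // Otherwise confirm finality back to the publisher over the network.
            command.sequence.push('findPublisherNodeCommand', 'networkFinalityCommand');
        }
    }
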
From a8d8a6bccf31ca4603f94bd1f0e173e7d8f90bea Mon Sep 17 00:00:00 2001
From: Mihajlo Pavlovic
Date: Mon, 30 Dec 2024 16:56:20 +0100
Subject: [PATCH 23/27] Set MAX_RETRIES_READ_CACHED_PUBLISH_DATA to 10

---
 src/constants/constants.js | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/constants/constants.js b/src/constants/constants.js
index 0ae75657b..314b14ad7 100644
--- a/src/constants/constants.js
+++ b/src/constants/constants.js
@@ -1173,8 +1173,8 @@ export const LOCAL_INSERT_FOR_ASSET_SYNC_RETRY_DELAY = 1000;
 export const LOCAL_INSERT_FOR_CURATED_PARANET_MAX_ATTEMPTS = 5;
 export const LOCAL_INSERT_FOR_CURATED_PARANET_RETRY_DELAY = 1000;
 
-export const MAX_RETRIES_READ_CACHED_PUBLISH_DATA = 5;
-export const RETRY_DELAY_READ_CACHED_PUBLISH_DATA = 10000;
+export const MAX_RETRIES_READ_CACHED_PUBLISH_DATA = 10;
+export const RETRY_DELAY_READ_CACHED_PUBLISH_DATA = 10 * 1000;
 
 export const TRIPLE_STORE_REPOSITORY = {
     DKG: 'dkg',

From 5957baa75aecb1d8aff9b51d282d211b8fc8ba74 Mon Sep 17 00:00:00 2001
From: Mihajlo Pavlovic
Date: Mon, 30 Dec 2024 16:58:17 +0100
Subject: [PATCH 24/27] Increase default number of retries for
 insertKnowledgeCollection in tripleStoreService

---
 src/service/triple-store-service.js | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/service/triple-store-service.js b/src/service/triple-store-service.js
index 8ab9214ac..59daaea1c 100644
--- a/src/service/triple-store-service.js
+++ b/src/service/triple-store-service.js
@@ -37,7 +37,7 @@ class TripleStoreService {
         repository,
         knowledgeCollectionUAL,
         triples,
-        retries = 1,
+        retries = 5,
         retryDelay = 0,
     ) {
         this.logger.info(
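Note (editor's illustration, not part of the patch series): the constants changed by patch 23 act as a retry budget of up to 10 attempts spaced 10 seconds apart, and patch 24 raises the default insertKnowledgeCollection retries to 5. A generic sketch of how such a budget is typically consumed; the helper below is illustrative, not the node's actual scheduler:

    // Sketch: retry a task according to the retry budget above.
    import { setTimeout as sleep } from 'node:timers/promises';

    async function withRetries(task, maxRetries = 10, retryDelay = 10 * 1000) {
        let lastError;
        for (let attempt = 0; attempt < maxRetries; attempt += 1) {
            try {
                return await task();
            } catch (error) {
                lastError = error;
                // Wait before the next attempt, except after the final one.
                if (attempt < maxRetries - 1) await sleep(retryDelay);
            }
        }
        throw lastError;
    }
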
From df6fd2635d4158110eafbc8c7d3979bb0915e0f1 Mon Sep 17 00:00:00 2001
From: Zvonimir
Date: Tue, 31 Dec 2024 10:44:12 +0100
Subject: [PATCH 25/27] Rename isOldContract into isV6Contract

---
 .../receiver/v1.0.0/v1-0-0-handle-get-request-command.js   | 4 ++--
 .../protocols/get/sender/get-validate-asset-command.js     | 4 ++--
 src/commands/protocols/get/sender/local-get-command.js     | 4 ++--
 .../get/sender/v1.0.0/v1-0-0-get-request-command.js        | 8 ++++----
 src/constants/constants.js                                 | 2 +-
 src/controllers/http-api/v0/get-http-api-controller-v0.js  | 8 ++++----
 src/controllers/http-api/v1/get-http-api-controller-v1.js  | 8 ++++----
 src/controllers/rpc/get-rpc-controller.js                  | 2 +-
 8 files changed, 20 insertions(+), 20 deletions(-)

diff --git a/src/commands/protocols/get/receiver/v1.0.0/v1-0-0-handle-get-request-command.js b/src/commands/protocols/get/receiver/v1.0.0/v1-0-0-handle-get-request-command.js
index 731356946..d8552ac9c 100644
--- a/src/commands/protocols/get/receiver/v1.0.0/v1-0-0-handle-get-request-command.js
+++ b/src/commands/protocols/get/receiver/v1.0.0/v1-0-0-handle-get-request-command.js
@@ -37,7 +37,7 @@ class HandleGetRequestCommand extends HandleProtocolMessageCommand {
             ual,
             includeMetadata,
             isOperationV0,
-            isOldContract,
+            isV6Contract,
         } = commandData;
         let { assertionId, knowledgeAssetId } = commandData;
 
@@ -229,7 +229,7 @@ class HandleGetRequestCommand extends HandleProtocolMessageCommand {
 
         const responseData = {
             assertion:
-                (isOperationV0 || notMigrated) && isOldContract
+                (isOperationV0 || notMigrated) && isV6Contract
                     ? [...(assertion.public ?? []), ...(assertion.private ?? [])]
                     : assertion,
             ...(includeMetadata && metadata && { metadata }),
diff --git a/src/commands/protocols/get/sender/get-validate-asset-command.js b/src/commands/protocols/get/sender/get-validate-asset-command.js
index 9831b5c2c..dc65a1025 100644
--- a/src/commands/protocols/get/sender/get-validate-asset-command.js
+++ b/src/commands/protocols/get/sender/get-validate-asset-command.js
@@ -30,7 +30,7 @@ class GetValidateAssetCommand extends ValidateAssetCommand {
             knowledgeCollectionId,
             ual,
             isOperationV0,
-            isOldContract,
+            isV6Contract,
         } = command.data;
         await this.operationIdService.updateOperationIdStatus(
             operationId,
@@ -56,7 +56,7 @@ class GetValidateAssetCommand extends ValidateAssetCommand {
             blockchain,
         );
         // TODO: Update to validate knowledge asset index
-        if (!isOperationV0 && !isOldContract) {
+        if (!isOperationV0 && !isV6Contract) {
             const isValidUal = await this.validationService.validateUal(
                 blockchain,
                 contract,
diff --git a/src/commands/protocols/get/sender/local-get-command.js b/src/commands/protocols/get/sender/local-get-command.js
index 79d2bc472..ec7abf963 100644
--- a/src/commands/protocols/get/sender/local-get-command.js
+++ b/src/commands/protocols/get/sender/local-get-command.js
@@ -35,7 +35,7 @@ class LocalGetCommand extends Command {
             contentType,
             assertionId,
             isOperationV0,
-            isOldContract,
+            isV6Contract,
         } = command.data;
         let { knowledgeAssetId } = command.data;
         await this.operationIdService.updateOperationIdStatus(
@@ -219,7 +219,7 @@ class LocalGetCommand extends Command {
 
         const responseData = {
             assertion:
-                (isOperationV0 || notMigrated) && isOldContract
+                (isOperationV0 || notMigrated) && isV6Contract
                     ? [...(assertion?.public ?? []), ...(assertion?.private ?? [])]
                     : assertion,
             ...(includeMetadata && metadata && { metadata }),
diff --git a/src/commands/protocols/get/sender/v1.0.0/v1-0-0-get-request-command.js b/src/commands/protocols/get/sender/v1.0.0/v1-0-0-get-request-command.js
index d292d9e9b..d541816bd 100644
--- a/src/commands/protocols/get/sender/v1.0.0/v1-0-0-get-request-command.js
+++ b/src/commands/protocols/get/sender/v1.0.0/v1-0-0-get-request-command.js
@@ -51,7 +51,7 @@ class GetRequestCommand extends ProtocolRequestCommand {
             paranetId,
             isOperationV0,
             assertionId,
-            isOldContract,
+            isV6Contract,
         } = command.data;
 
         return {
@@ -65,7 +65,7 @@ class GetRequestCommand extends ProtocolRequestCommand {
             paranetId,
             isOperationV0,
             assertionId,
-            isOldContract,
+            isV6Contract,
         };
     }
 
@@ -76,7 +76,7 @@ class GetRequestCommand extends ProtocolRequestCommand {
             knowledgeCollectionId,
             knowledgeAssetId,
             isOperationV0,
-            isOldContract,
+            isV6Contract,
         } = command.data;
 
         if (responseData?.assertion?.public) {
@@ -103,7 +103,7 @@ class GetRequestCommand extends ProtocolRequestCommand {
             ...kcTools.groupNquadsBySubject(privateHashTriples, true),
         );
 
-        if (!isOldContract) {
+        if (!isV6Contract) {
             try {
                 await this.validationService.validateDatasetOnBlockchain(
                     publicKnowledgeAssetsTriplesGrouped.map((t) => t.sort()).flat(),
diff --git a/src/constants/constants.js b/src/constants/constants.js
index 0ae75657b..dcc714dd4 100644
--- a/src/constants/constants.js
+++ b/src/constants/constants.js
@@ -1187,7 +1187,7 @@ export const TRIPLES_VISIBILITY = {
     ALL: 'all',
 };
 
-export const OLD_CONTENT_STORAGE_MAP = {
+export const V6_CONTENT_STORAGE_MAP = {
     BASE_MAINNET: '0x3bdfA81079B2bA53a25a6641608E5E1E6c464597',
     BASE_TESTNET: '0x9e3071Dc0730CB6dd0ce42969396D716Ea33E7e1',
     BASE_DEVNET: '0xBe08A25dcF2B68af88501611e5456571f50327B4',
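Note (editor's illustration, not part of the patch series): the renamed V6_CONTENT_STORAGE_MAP is what the HTTP controllers in the following hunks use to decide whether a UAL points at a legacy v6 content-storage contract. A standalone sketch of that check, mirroring the controller code:

    // Sketch: detect a v6 content-storage contract address.
    function isV6ContentStorage(contract, v6ContentStorageMap) {
        return Object.values(v6ContentStorageMap).some((ca) =>
            ca.toLowerCase().includes(contract.toLowerCase()),
        );
    }
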
diff --git a/src/controllers/http-api/v0/get-http-api-controller-v0.js b/src/controllers/http-api/v0/get-http-api-controller-v0.js
index 60569f7b5..3f8385e7d 100644
--- a/src/controllers/http-api/v0/get-http-api-controller-v0.js
+++ b/src/controllers/http-api/v0/get-http-api-controller-v0.js
@@ -3,7 +3,7 @@ import {
     OPERATION_STATUS,
     ERROR_TYPE,
     TRIPLES_VISIBILITY,
-    OLD_CONTENT_STORAGE_MAP,
+    V6_CONTENT_STORAGE_MAP,
 } from '../../../constants/constants.js';
 import BaseController from '../base-http-api-controller.js';
 
@@ -65,13 +65,13 @@ class GetController extends BaseController {
         // Get assertionId - datasetRoot
         //
 
-        const isOldContract = Object.values(OLD_CONTENT_STORAGE_MAP).some((ca) =>
+        const isV6Contract = Object.values(V6_CONTENT_STORAGE_MAP).some((ca) =>
             ca.toLowerCase().includes(contract.toLowerCase()),
         );
 
         const commandSequence = [];
 
-        if (!tripleStoreMigrationAlreadyExecuted && isOldContract) {
+        if (!tripleStoreMigrationAlreadyExecuted && isV6Contract) {
             commandSequence.push('getAssertionMerkleRootCommand');
         }
 
@@ -90,7 +90,7 @@ class GetController extends BaseController {
                 knowledgeAssetId,
                 operationId,
                 paranetUAL,
-                isOldContract,
+                isV6Contract,
                 contentType: contentType ?? TRIPLES_VISIBILITY.ALL,
                 isOperationV0: true,
             },
diff --git a/src/controllers/http-api/v1/get-http-api-controller-v1.js b/src/controllers/http-api/v1/get-http-api-controller-v1.js
index 56cb167ac..d139c5bdb 100644
--- a/src/controllers/http-api/v1/get-http-api-controller-v1.js
+++ b/src/controllers/http-api/v1/get-http-api-controller-v1.js
@@ -3,7 +3,7 @@ import {
     OPERATION_STATUS,
     ERROR_TYPE,
     TRIPLES_VISIBILITY,
-    OLD_CONTENT_STORAGE_MAP,
+    V6_CONTENT_STORAGE_MAP,
 } from '../../../constants/constants.js';
 import BaseController from '../base-http-api-controller.js';
 
@@ -66,14 +66,14 @@ class GetController extends BaseController {
         // Get assertionId - datasetRoot
         //
 
-        const isOldContract = Object.values(OLD_CONTENT_STORAGE_MAP).some((ca) =>
+        const isV6Contract = Object.values(V6_CONTENT_STORAGE_MAP).some((ca) =>
             ca.toLowerCase().includes(contract.toLowerCase()),
         );
 
         const commandSequence = [];
         commandSequence.push('getValidateAssetCommand');
 
-        if (!tripleStoreMigrationAlreadyExecuted && isOldContract) {
+        if (!tripleStoreMigrationAlreadyExecuted && isV6Contract) {
             commandSequence.push('getAssertionMerkleRootCommand');
         }
 
@@ -92,7 +92,7 @@ class GetController extends BaseController {
                 knowledgeAssetId,
                 operationId,
                 paranetUAL,
-                isOldContract,
+                isV6Contract,
                 contentType: contentType ?? TRIPLES_VISIBILITY.ALL,
             },
             transactional: false,
diff --git a/src/controllers/rpc/get-rpc-controller.js b/src/controllers/rpc/get-rpc-controller.js
index d587e9144..b04812eb9 100644
--- a/src/controllers/rpc/get-rpc-controller.js
+++ b/src/controllers/rpc/get-rpc-controller.js
@@ -39,7 +39,7 @@ class GetController extends BaseController {
                 paranetId: message.data.paranetId,
                 isOperationV0: message.data.isOperationV0,
                 assertionId: message.data.assertionId,
-                isOldContract: message.data.isOldContract,
+                isV6Contract: message.data.isV6Contract,
             },
             transactional: false,
         });

From 4156b864d94491d5b5f7258c6259dc59162d0fc9 Mon Sep 17 00:00:00 2001
From: Mihajlo Pavlovic
Date: Tue, 31 Dec 2024 11:15:21 +0100
Subject: [PATCH 26/27] Add SPARQL timeout

---
 src/modules/triple-store/implementation/ot-triple-store.js | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/src/modules/triple-store/implementation/ot-triple-store.js b/src/modules/triple-store/implementation/ot-triple-store.js
index 53c89af9c..82b133475 100644
--- a/src/modules/triple-store/implementation/ot-triple-store.js
+++ b/src/modules/triple-store/implementation/ot-triple-store.js
@@ -68,9 +68,13 @@ class OtTripleStore {
                 type: 'sparql',
                 value: this.repositories[repository].sparqlEndpointUpdate,
             },
+            httpTimeout: 60_000,
+            httpBodyTimeout: true,
         };
         this.repositories[repository].queryContext = {
             sources,
+            httpTimeout: 60_000,
+            httpBodyTimeout: true,
         };
     }
 }

From 7f912a2180305e393dae13fec6aa3a0dba844384 Mon Sep 17 00:00:00 2001
From: Mihajlo Pavlovic
Date: Tue, 31 Dec 2024 11:39:05 +0100
Subject: [PATCH 27/27] version bump

---
 package-lock.json | 4 ++--
 package.json      | 2 +-
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/package-lock.json b/package-lock.json
index 67bebf224..7ecb4f8b7 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1,12 +1,12 @@
 {
   "name": "origintrail_node",
-  "version": "8.0.0+hotfix.3",
+  "version": "8.0.0+hotfix.4",
   "lockfileVersion": 2,
   "requires": true,
   "packages": {
     "": {
       "name": "origintrail_node",
-      "version": "8.0.0+hotfix.3",
+      "version": "8.0.0+hotfix.4",
       "license": "ISC",
       "dependencies": {
         "@comunica/query-sparql": "^2.4.3",
diff --git a/package.json b/package.json
index 5d0449e3b..355baaef1 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
   "name": "origintrail_node",
-  "version": "8.0.0+hotfix.3",
+  "version": "8.0.0+hotfix.4",
   "description": "OTNode V8",
   "main": "index.js",
   "type": "module",
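Note (editor's illustration, not part of the patch series): httpTimeout and httpBodyTimeout, added by patch 26, are Comunica query-context options; with httpBodyTimeout set to true the 60-second budget also covers streaming the response body, not only the initial request. A minimal usage sketch with an assumed endpoint URL:

    // Sketch: issuing a query with the same timeout context as patch 26.
    import { QueryEngine } from '@comunica/query-sparql';

    const engine = new QueryEngine();
    const bindings = await engine.queryBindings('SELECT * WHERE { ?s ?p ?o } LIMIT 1', {
        sources: [{ type: 'sparql', value: 'http://localhost:9999/sparql' }], // assumed endpoint
        httpTimeout: 60_000,   // abort requests that take longer than 60 s
        httpBodyTimeout: true, // apply the same budget to reading the response body
    });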