From 1ffb5f68a35a8b754ccd860fe3fc2591d39da729 Mon Sep 17 00:00:00 2001
From: kotlarmilos
Date: Tue, 19 May 2020 09:33:22 +0200
Subject: [PATCH 1/4] Add support for sorting OTJSON version 1.1 (#1257)

* Added new sorting method for OT-JSON
* Refactor extractDatasetSigner method to sort a copy of the dataset instead of sorting in place
* Add support for OT-JSON version 1.1
* Refactor OtJsonService to return sorted copies or undefined
* Refactor getMerkleProofs to return otObjects
* Test new version of OT-JSON
* Code cleanup
---
 modules/ImportUtilities.js                    |  49 ++++---
 modules/OtJsonUtilities.js                    | 117 ++++++++++++----
 modules/Utilities.js                          |  43 ++++++
 .../dc/dc-litigation-initiate-command.js      |   9 +-
 .../command/dc/dc-offer-finalized-command.js  |  12 +-
 .../dh/dh-replication-import-command.js       |  16 ++-
 .../dv/dv-data-read-response-free-command.js  |   7 +-
 modules/service/import-service.js             |  32 +++--
 modules/service/replication-service.js        |   7 +-
 .../epcis/epcis-otjson-transpiler.js          |  10 +-
 .../transpiler/wot/wot-otjson-transpiler.js   |  10 +-
 modules/worker/export-worker.js               |  13 +-
 modules/worker/import-worker-controller.js    |   2 -
 test/bdd/steps/datalayer.js                   |   9 +-
 test/bdd/steps/lib/utilities.js               | 129 ------------------
 test/bdd/steps/network.js                     |   8 +-
 test/modules/gs1-importer.test.js             |   1 -
 test/modules/import-utilities.test.js         |   2 -
 test/modules/otjson-utilities.test.js         |  81 +++++++++++
 19 files changed, 336 insertions(+), 221 deletions(-)
 create mode 100644 test/modules/otjson-utilities.test.js

diff --git a/modules/ImportUtilities.js b/modules/ImportUtilities.js
index ed6fdbc59f..c1a7906410 100644
--- a/modules/ImportUtilities.js
+++ b/modules/ImportUtilities.js
@@ -140,8 +140,10 @@ class ImportUtilities {
     }
 
     static prepareDataset(originalDocument, config, web3) {
-        const document = originalDocument; // todo add otJsonService
-        const graph = document['@graph'];
+        let document = OtJsonUtilities.prepareDatasetForNewImport(originalDocument);
+        if (!document) {
+            document = originalDocument;
+        }
         const datasetHeader = document.datasetHeader ?
document.datasetHeader : {}; ImportUtilities.calculateGraphPermissionedDataHashes(document['@graph']); const id = ImportUtilities.calculateGraphPublicHash(document); @@ -157,7 +159,7 @@ class ImportUtilities { '@id': id, '@type': 'Dataset', datasetHeader: header, - '@graph': graph, + '@graph': document['@graph'], }; const rootHash = ImportUtilities.calculateDatasetRootHash(dataset); @@ -452,9 +454,11 @@ class ImportUtilities { } static calculateDatasetRootHash(dataset) { - const datasetClone = Utilities.copyObject(dataset); - ImportUtilities.removeGraphPermissionedData(datasetClone['@graph']); - const sortedDataset = OtJsonUtilities.prepareDatasetForGeneratingRootHash(datasetClone); + let sortedDataset = OtJsonUtilities.prepareDatasetForGeneratingRootHash(dataset); + if (!sortedDataset) { + sortedDataset = Utilities.copyObject(dataset); + } + ImportUtilities.removeGraphPermissionedData(sortedDataset['@graph']); const datasetId = sortedDataset['@id']; const datasetCreator = sortedDataset.datasetHeader.dataCreator; @@ -590,7 +594,10 @@ class ImportUtilities { * @returns {string} */ static calculateGraphPublicHash(dataset) { - const sortedDataset = OtJsonUtilities.prepareDatasetForGeneratingGraphHash(dataset); + let sortedDataset = OtJsonUtilities.prepareDatasetForGeneratingGraphHash(dataset); + if (!sortedDataset) { + sortedDataset = Utilities.copyObject(dataset); + } ImportUtilities.removeGraphPermissionedData(sortedDataset['@graph']); return `0x${sha3_256(JSON.stringify(sortedDataset['@graph']), null, 0)}`; } @@ -627,32 +634,38 @@ class ImportUtilities { } /** - * Sign OT-JSON + * Sign dataset * @static */ - static signDataset(otjson, config, web3) { - const sortedOTJson = OtJsonUtilities.prepareDatasetForGeneratingSignature(otjson); - ImportUtilities.removeGraphPermissionedData(sortedOTJson['@graph']); + static signDataset(dataset, config, web3) { + let sortedDataset = OtJsonUtilities.prepareDatasetForGeneratingSignature(dataset); + if (!sortedDataset) { + sortedDataset = Utilities.copyObject(dataset); + } + ImportUtilities.removeGraphPermissionedData(sortedDataset['@graph']); const { signature } = web3.eth.accounts.sign( - JSON.stringify(sortedOTJson), + JSON.stringify(sortedDataset), Utilities.normalizeHex(config.node_private_key), ); - otjson.signature = { + dataset.signature = { value: signature, type: 'ethereum-signature', }; - return otjson; + return dataset; } /** * Extract Signer from OT-JSON signature * @static */ - static extractDatasetSigner(otjson, web3) { - const strippedOtjson = OtJsonUtilities.prepareDatasetForGeneratingSignature(otjson); - delete strippedOtjson.signature; - return web3.eth.accounts.recover(JSON.stringify(strippedOtjson), otjson.signature.value); + static extractDatasetSigner(dataset, web3) { + let sortedDataset = OtJsonUtilities.prepareDatasetForGeneratingSignature(dataset); + if (!sortedDataset) { + sortedDataset = Utilities.copyObject(dataset); + } + delete sortedDataset.signature; + return web3.eth.accounts.recover(JSON.stringify(sortedDataset), dataset.signature.value); } diff --git a/modules/OtJsonUtilities.js b/modules/OtJsonUtilities.js index e846e7cfe0..691a44e259 100644 --- a/modules/OtJsonUtilities.js +++ b/modules/OtJsonUtilities.js @@ -59,12 +59,12 @@ class OtJsonUtilities { case '1.0': datasetCopy = Utilities.copyObject(dataset); datasetCopy['@graph'] = JSON.parse(Utilities.sortedStringify(datasetCopy['@graph'], true)); - break; + return datasetCopy; + case '1.1': + return undefined; default: throw new Error('Unsupported ot-json version!'); } 
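// --- Review sketch (annotation, not part of the patch) ----------------------
// Every OtJsonUtilities.prepare* method now returns a sorted copy of the
// dataset, or undefined when the dataset's OT-JSON version is already in the
// canonical form for that operation. Callers therefore fall back to the
// original document, as the rest of this patch does consistently:
//
//     let sortedDataset = OtJsonUtilities.prepareDatasetForGeneratingRootHash(dataset);
//     if (!sortedDataset) {
//         sortedDataset = Utilities.copyObject(dataset); // nothing to re-sort
//     }
// -----------------------------------------------------------------------------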
-
-        return datasetCopy;
     }
 
     /**
@@ -85,24 +85,33 @@ class OtJsonUtilities {
             datasetCopy = Utilities.copyObject(dataset);
             OtJsonUtilities.sortGraphRelationsAndIdentifiers(datasetCopy['@graph']);
             return datasetCopy;
+        case '1.1':
+            return undefined;
         default:
             throw new Error('Unsupported ot-json version!');
         }
     }
 
     /**
-     * Formats the dataset IN PLACE so that the signature can be generated properly
+     * Formats the dataset after it has been read from the database
      *
     * @param dataset
-     * @returns {any}|undefined
+     * @returns {any}|undefined - A formatted copy of the dataset, or undefined if the dataset is
+     * already formatted
      */
-    static prepareDatasetForExtractSigner(dataset) {
+    static prepareDatasetForDatabaseRead(dataset) {
         const version = OtJsonUtilities._getDatasetVersion(dataset);
 
+        let datasetCopy;
+
         switch (version) {
         case '1.0':
-            OtJsonUtilities.sortGraphRelationsAndIdentifiers(dataset['@graph']);
-            break;
+            datasetCopy = Utilities.copyObject(dataset);
+            OtJsonUtilities.sortGraphRelationsAndIdentifiers(datasetCopy['@graph']);
+            return datasetCopy;
+        case '1.1':
+            datasetCopy = Utilities.copyObject(dataset);
+            return JSON.parse(Utilities.sortObjectRecursively(datasetCopy));
         default:
             throw new Error('Unsupported ot-json version!');
         }
@@ -125,6 +134,8 @@ class OtJsonUtilities {
             datasetCopy = Utilities.copyObject(dataset);
             OtJsonUtilities.sortGraphRelationsAndIdentifiers(datasetCopy['@graph']);
             return JSON.parse(Utilities.sortedStringify(datasetCopy, false));
+        case '1.1':
+            return undefined;
         default:
             throw new Error('Unsupported ot-json version!');
         }
@@ -147,18 +158,20 @@ class OtJsonUtilities {
             datasetCopy = Utilities.copyObject(dataset);
             datasetCopy['@graph'] = JSON.parse(Utilities.sortedStringify(datasetCopy['@graph'], true));
             return datasetCopy;
+        case '1.1':
+            return undefined;
         default:
             throw new Error('Unsupported ot-json version!');
         }
     }
 
     /**
-     * Formats the dataset for proper generating of offer challenges
+     * Formats the dataset for proper generating of Merkle proofs for ot-objects
      * @param dataset
      * @returns {any}|undefined - A formatted copy of the dataset, or undefined if the
      * dataset is already formatted
      */
-    static prepareDatasetForGeneratingChallenges(dataset) {
+    static prepareDatasetForGeneratingMerkleProofs(dataset) {
         const version = OtJsonUtilities._getDatasetVersion(dataset);
 
         let datasetCopy;
 
@@ -166,20 +179,23 @@ class OtJsonUtilities {
         switch (version) {
         case '1.0':
             datasetCopy = Utilities.copyObject(dataset);
-            datasetCopy['@graph'] = JSON.parse(Utilities.sortedStringify(datasetCopy['@graph'], true));
+            OtJsonUtilities.sortGraphRelationsAndIdentifiers(datasetCopy['@graph']);
             return datasetCopy;
+        case '1.1':
+            return undefined;
         default:
             throw new Error('Unsupported ot-json version!');
         }
     }
 
     /**
-     * Formats the dataset for proper generating of Merkle proofs for ot-objects
+     * Formats the dataset so that the dataset can be read and imported by a Data Viewer
+     *
      * @param dataset
-     * @returns {any}|undefined - A formatted copy of the dataset, or undefined if the
-     * dataset is already formatted
+     * @returns {any}|undefined - A formatted copy of the dataset, or undefined if the dataset is
+     * already formatted
      */
-    static prepareDatasetForGeneratingMerkleProofs(dataset) {
+    static prepareDatasetForDataRead(dataset) {
         const version = OtJsonUtilities._getDatasetVersion(dataset);
 
         let datasetCopy;
 
@@ -188,20 +204,22 @@ class OtJsonUtilities {
         case '1.0':
             datasetCopy = Utilities.copyObject(dataset);
             OtJsonUtilities.sortGraphRelationsAndIdentifiers(datasetCopy['@graph']);
-            return datasetCopy;
+            return
JSON.parse(Utilities.sortedStringify(datasetCopy, false)); + case '1.1': + return undefined; default: throw new Error('Unsupported ot-json version!'); } } /** - * Formats the dataset so that the dataset can be read and imported by a Data Viewer + * Formats the dataset so that the dataset can be imported to the graph database as old otJson * * @param dataset * @returns {any}|undefined - A formatted copy of the dataset, or undefined if the dataset is * already formatted */ - static prepareDatasetForDataRead(dataset) { + static prepareDatasetForOldImport(dataset) { const version = OtJsonUtilities._getDatasetVersion(dataset); let datasetCopy; @@ -211,41 +229,44 @@ class OtJsonUtilities { datasetCopy = Utilities.copyObject(dataset); OtJsonUtilities.sortGraphRelationsAndIdentifiers(datasetCopy['@graph']); return JSON.parse(Utilities.sortedStringify(datasetCopy, false)); + case '1.1': + return undefined; default: throw new Error('Unsupported ot-json version!'); } } /** - * Formats the dataset so that the dataset can be imported to the graph database + * Formats the dataset so that the dataset can be imported to the graph database as new otJson * * @param dataset * @returns {any}|undefined - A formatted copy of the dataset, or undefined if the dataset is * already formatted */ - static prepareDatasetForImport(dataset) { + static prepareDatasetForNewImport(dataset) { const version = OtJsonUtilities._getDatasetVersion(dataset); let datasetCopy; switch (version) { case '1.0': + return undefined; + case '1.1': datasetCopy = Utilities.copyObject(dataset); - OtJsonUtilities.sortGraphRelationsAndIdentifiers(datasetCopy['@graph']); - return JSON.parse(Utilities.sortedStringify(datasetCopy, false)); + return JSON.parse(Utilities.sortObjectRecursively(datasetCopy)); default: throw new Error('Unsupported ot-json version!'); } } /** - * Formats the dataset so that the dataset can be exported and validated in OT-JSON format + * Formats the dataset so that the dataset can be exported and validated in old OT-JSON format * * @param dataset * @returns {any}|undefined - A formatted copy of the dataset, or undefined if the dataset is * already formatted */ - static prepareDatasetForExport(dataset) { + static prepareDatasetForOldExport(dataset) { const version = OtJsonUtilities._getDatasetVersion(dataset); let datasetCopy; @@ -255,6 +276,54 @@ class OtJsonUtilities { datasetCopy = Utilities.copyObject(dataset); OtJsonUtilities.sortGraphRelationsAndIdentifiers(datasetCopy['@graph']); return datasetCopy; + case '1.1': + return undefined; + default: + throw new Error('Unsupported ot-json version!'); + } + } + + /** + * Formats the dataset so that the dataset can be exported and validated in new OT-JSON format + * + * @param dataset + * @returns {any}|undefined - A formatted copy of the dataset, or undefined if the dataset is + * already formatted + */ + static prepareDatasetForNewExport(dataset) { + const version = OtJsonUtilities._getDatasetVersion(dataset); + + let datasetCopy; + + switch (version) { + case '1.0': + return undefined; + case '1.1': + datasetCopy = Utilities.copyObject(dataset); + return JSON.parse(Utilities.sortObjectRecursively(datasetCopy)); + default: + throw new Error('Unsupported ot-json version!'); + } + } + + /** + * Formats the dataset for replication import + * + * @param dataset + * @returns {any}|undefined - A formatted copy of the dataset, or undefined if the dataset is + * already formatted + */ + static prepareDatasetForNewReplication(dataset) { + const version = 
OtJsonUtilities._getDatasetVersion(dataset);
+
+        let datasetCopy;
+
+        switch (version) {
+        case '1.0':
+            return undefined;
+        case '1.1':
+            datasetCopy = Utilities.copyObject(dataset);
+            return JSON.parse(Utilities.sortObjectRecursively(datasetCopy));
         default:
             throw new Error('Unsupported ot-json version!');
         }
diff --git a/modules/Utilities.js b/modules/Utilities.js
index 98f3882dfd..aeca2a9ed4 100644
--- a/modules/Utilities.js
+++ b/modules/Utilities.js
@@ -72,6 +72,49 @@ class Utilities {
         return `${JSON.stringify(obj)}`;
     }
 
+    /**
+     * Optimized sort method for OTJSON 1.1
+     * @param obj - Object to be serialized
+     * @param inProperties - True when inside a 'properties' object, whose array items
+     * keep their original order
+     * @return {string}
+     */
+    static sortObjectRecursively(obj, inProperties = false) {
+        if (obj != null && typeof obj === 'object') {
+            const stringified = [];
+            for (const key of Object.keys(obj)) {
+                if (Array.isArray(obj)) {
+                    if (obj[key] != null && typeof obj[key] === 'object') {
+                        stringified.push(this.sortedStringify(obj[key], inProperties));
+                    } else {
+                        // Added for better performance by avoiding the last level of recursion
+                        // because the last level only returns JSON.stringify of the value
+                        stringified.push(JSON.stringify(obj[key]));
+                    }
+                } else if (obj[key] != null && typeof obj[key] === 'object') {
+                    if (key === 'properties') { inProperties = true; }
+                    stringified.push(`"${key}":${this.sortedStringify(obj[key], inProperties)}`);
+                } else {
+                    // Added for better performance by avoiding the last level of recursion
+                    // because the last level only returns JSON.stringify of the value
+                    stringified.push(`"${key}":${JSON.stringify(obj[key])}`);
+                }
+            }
+
+            // Always sort object keys; sort array items only when outside of a 'properties' object
+            if (!Array.isArray(obj) || inProperties === false) {
+                stringified.sort();
+            }
+
+            // Return result in the format of a stringified array
+            if (Array.isArray(obj)) {
+                return `[${stringified.join(',')}]`;
+            }
+            // Return result in the format of an object
+            return `{${stringified.join(',')}}`;
+        }
+        return JSON.stringify(obj);
+    }
+
     /**
      * Check if all dependencies from package.json are installed
      * @returns {Promise} containing error array:
diff --git a/modules/command/dc/dc-litigation-initiate-command.js b/modules/command/dc/dc-litigation-initiate-command.js
index 0fc8185d2f..a5f3abe630 100644
--- a/modules/command/dc/dc-litigation-initiate-command.js
+++ b/modules/command/dc/dc-litigation-initiate-command.js
@@ -71,15 +71,18 @@ class DCLitigationInitiateCommand extends Command {
         const dcIdentity = utilities.normalizeHex(this.config.erc725Identity);
 
         const otJson = await this.importService.getImport(offer.data_set_id);
-        let encryptedDataset = importUtilities.encryptDataset(
+        const encryptedDataset = importUtilities.encryptDataset(
             otJson,
             litigationPrivateKey,
         );
-        encryptedDataset =
+        let sortedDataset =
             OtJsonUtilities.prepareDatasetForGeneratingLitigationProof(encryptedDataset);
+        if (!sortedDataset) {
+            sortedDataset = encryptedDataset;
+        }
 
         const merkleProof = this.challengeService.createChallengeProof(
-            encryptedDataset['@graph'],
+            sortedDataset['@graph'],
             objectIndex,
             blockIndex,
         );
diff --git a/modules/command/dc/dc-offer-finalized-command.js b/modules/command/dc/dc-offer-finalized-command.js
index 13f068fece..beb5b84615 100644
--- a/modules/command/dc/dc-offer-finalized-command.js
+++ b/modules/command/dc/dc-offer-finalized-command.js
@@ -153,13 +153,17 @@ class DcOfferFinalizedCommand extends Command {
             const encryptionColor =
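// --- Review sketch (annotation, not part of the patch) ----------------------
// The Utilities.sortObjectRecursively() helper added above is the canonical
// serializer for OT-JSON 1.1: per the new unit test in
// test/modules/otjson-utilities.test.js, object keys are sorted at every depth
// while array element order is preserved. The 1.1 branches of the
// OtJsonUtilities.prepare* methods all use it the same way:
//
//     const datasetCopy = Utilities.copyObject(dataset);
//     const canonical = JSON.parse(Utilities.sortObjectRecursively(datasetCopy));
// -----------------------------------------------------------------------------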
this.replicationService.castNumberToColor(replicatedData.color); - let encryptedDataset = + const encryptedDataset = (await this.replicationService.loadReplication(offer.id, encryptionColor)).otJson; - encryptedDataset = - OtJsonUtilities.prepareDatasetForGeneratingChallenges(encryptedDataset); + + let sortedDataset = + OtJsonUtilities.prepareDatasetForGeneratingLitigationProof(encryptedDataset); + if (!sortedDataset) { + sortedDataset = encryptedDataset; + } const challenges = this.challengeService.generateChallenges( - encryptedDataset['@graph'], startTime, + sortedDataset['@graph'], startTime, endTime, this.config.numberOfChallenges, ); diff --git a/modules/command/dh/dh-replication-import-command.js b/modules/command/dh/dh-replication-import-command.js index 679bd1a138..53e77071fc 100644 --- a/modules/command/dh/dh-replication-import-command.js +++ b/modules/command/dh/dh-replication-import-command.js @@ -49,9 +49,16 @@ class DhReplicationImportCommand extends Command { const { otJson, permissionedData } = JSON.parse(fs.readFileSync(documentPath, { encoding: 'utf-8' })); - const { decryptedDataset, encryptedMap } = + const replication = await ImportUtilities.decryptDataset(otJson, litigationPublicKey, offerId, encColor); + let { decryptedDataset } = replication; + const { encryptedMap } = replication; + + const tempSortedDataset = OtJsonUtilities.prepareDatasetForNewReplication(decryptedDataset); + if (tempSortedDataset) { + decryptedDataset = tempSortedDataset; + } const calculatedDataSetId = await ImportUtilities.calculateGraphPublicHash(decryptedDataset); @@ -66,8 +73,11 @@ class DhReplicationImportCommand extends Command { throw Error(`Calculated root hash ${decryptedGraphRootHash} differs from Blockchain root hash ${blockchainRootHash}`); } - // Verify litigation root hash - const sortedDataset = OtJsonUtilities.prepareDatasetForGeneratingChallenges(otJson); + let sortedDataset = + OtJsonUtilities.prepareDatasetForGeneratingLitigationProof(otJson); + if (!sortedDataset) { + sortedDataset = otJson; + } const encryptedGraphRootHash = this.challengeService.getLitigationRootHash(sortedDataset['@graph']); if (encryptedGraphRootHash !== litigationRootHash) { diff --git a/modules/command/dv/dv-data-read-response-free-command.js b/modules/command/dv/dv-data-read-response-free-command.js index 06df88659b..d6b3980c2b 100644 --- a/modules/command/dv/dv-data-read-response-free-command.js +++ b/modules/command/dv/dv-data-read-response-free-command.js @@ -121,14 +121,17 @@ class DVDataReadResponseFreeCommand extends Command { } = JSON.parse(handler.data); if (readExport) { - const fileContent = OtJsonUtilities.prepareDatasetForDataRead(document); + let sortedDataset = OtJsonUtilities.prepareDatasetForDataRead(document); + if (!sortedDataset) { + sortedDataset = document; + } const cacheDirectory = path.join(this.config.appDataPath, 'export_cache'); try { await Utilities.writeContentsToFile( cacheDirectory, handler_id, - JSON.stringify(fileContent), + JSON.stringify(sortedDataset), ); } catch (e) { const filePath = path.join(cacheDirectory, handler_id); diff --git a/modules/service/import-service.js b/modules/service/import-service.js index 1e064ab5e7..b97f52cf9a 100644 --- a/modules/service/import-service.js +++ b/modules/service/import-service.js @@ -114,8 +114,7 @@ class ImportService { document.datasetHeader = metadata.datasetHeader; document.signature = metadata.signature; - // todo add otJsonService - return OtJsonUtilities.prepareDatasetForGeneratingRootHash(document); + return 
OtJsonUtilities.prepareDatasetForDatabaseRead(document); } /** @@ -546,7 +545,6 @@ class ImportService { }); await this.db.addDatasetMetadata(metadata); - OtJsonUtilities.prepareDatasetForExtractSigner(document); // Extract wallet from signature. const wallet = ImportUtilities.extractDatasetSigner( document, @@ -583,25 +581,34 @@ class ImportService { * @returns {Promise<[]>} */ async getMerkleProofs(objectIdsArray, datasetId) { - let otjson = await this.getImport(datasetId); + const dataset = await this.getImport(datasetId); - otjson = OtJsonUtilities.prepareDatasetForGeneratingMerkleProofs(otjson); + let sortedDataset = + OtJsonUtilities.prepareDatasetForGeneratingMerkleProofs(dataset); + if (!sortedDataset) { + sortedDataset = dataset; + } const merkleTree = ImportUtilities.createDistributionMerkleTree( - otjson['@graph'], + sortedDataset['@graph'], datasetId, - otjson.datasetHeader.dataCreator, + sortedDataset.datasetHeader.dataCreator, ); const proofs = []; for (const objectId of objectIdsArray) { const objectIndex = - _graph(otjson).findIndex(graphObject => _id(graphObject) === objectId); + _graph(sortedDataset).findIndex(graphObject => _id(graphObject) === objectId); + + const object = + _graph(sortedDataset).find(graphObject => _id(graphObject) === objectId); const proof = merkleTree.createProof(objectIndex + 1); - proofs.push({ object_id: objectId, object_index: objectIndex + 1, proof }); + proofs.push({ + object_id: objectId, otObject: object, object_index: objectIndex + 1, proof, + }); } return proofs; @@ -620,9 +627,6 @@ class ImportService { const otObjects = []; for (let i = 0; i < reconstructedObjects.length; i += 1) { - // TODO Use sortObjectRecursively here - // eslint-disable-next-line prefer-destructuring - reconstructedObjects[i] = (OtJsonUtilities.prepareDatasetForGeneratingMerkleProofs({ '@graph': [reconstructedObjects[i]] }))['@graph'][0]; if (reconstructedObjects[i] && reconstructedObjects[i]['@id']) { otObjects.push({ otObject: reconstructedObjects[i], @@ -641,8 +645,6 @@ class ImportService { } else if (graphObject.vertexType === constants.vertexType.connector) { otObject['@type'] = constants.objectType.otConnector; } - - // todo add otJsonService return otObject; } @@ -786,7 +788,7 @@ class ImportService { // TODO: Prepare support for multiple versions const { OTJSONVersion } = datasetHeader; - if (OTJSONVersion !== '1.0') { + if (!['1.0', '1.1'].includes(OTJSONVersion)) { throw Error('[Validation Error] Unsupported OT-JSON version.'); } diff --git a/modules/service/replication-service.js b/modules/service/replication-service.js index 0e72a1d198..9a63ce03f4 100644 --- a/modules/service/replication-service.js +++ b/modules/service/replication-service.js @@ -74,8 +74,11 @@ class ReplicationService { encryptedDataset = ImportUtilities.encryptDataset(otJson, litigationKeyPair.privateKey); - const sortedDataset = - OtJsonUtilities.prepareDatasetForGeneratingChallenges(encryptedDataset); + let sortedDataset = + OtJsonUtilities.prepareDatasetForGeneratingLitigationProof(encryptedDataset); + if (!sortedDataset) { + sortedDataset = encryptedDataset; + } const litRootHash = this.challengeService.getLitigationRootHash(sortedDataset['@graph']); const distEpk = Encryption.packEPK(distributionKeyPair.publicKey); diff --git a/modules/transpiler/epcis/epcis-otjson-transpiler.js b/modules/transpiler/epcis/epcis-otjson-transpiler.js index dec32d9f13..916109b2b0 100644 --- a/modules/transpiler/epcis/epcis-otjson-transpiler.js +++ 
b/modules/transpiler/epcis/epcis-otjson-transpiler.js @@ -75,7 +75,10 @@ class EpcisOtJsonTranspiler { otjson['@type'] = 'Dataset'; otjson.datasetHeader = importUtilities.createDatasetHeader(this.config, transpilationInfo); - let result = otjson; // todo add otJsonService + let result = OtJsonUtilities.prepareDatasetForNewImport(otjson); + if (!result) { + result = otjson; + } result['@id'] = importUtilities.calculateGraphPublicHash(result); const merkleRoot = importUtilities.calculateDatasetRootHash(result); result.datasetHeader.dataIntegrity.proofs[0].proofValue = merkleRoot; @@ -84,7 +87,10 @@ class EpcisOtJsonTranspiler { if (this.web3) { result = importUtilities.signDataset(result, this.config, this.web3); } else { - result = OtJsonUtilities.prepareDatasetForImport(result); + const sortedDataset = OtJsonUtilities.prepareDatasetForOldImport(result); + if (sortedDataset) { + result = sortedDataset; + } } return result; } diff --git a/modules/transpiler/wot/wot-otjson-transpiler.js b/modules/transpiler/wot/wot-otjson-transpiler.js index ee42fe1708..0ed9c5566c 100644 --- a/modules/transpiler/wot/wot-otjson-transpiler.js +++ b/modules/transpiler/wot/wot-otjson-transpiler.js @@ -58,7 +58,10 @@ class WotOtJsonTranspiler { otjson['@type'] = 'Dataset'; otjson.datasetHeader = importUtilities.createDatasetHeader(this.config, transpilationInfo); - let result = otjson; // todo add otJsonService + let result = OtJsonUtilities.prepareDatasetForNewImport(otjson); + if (!result) { + result = otjson; + } result['@id'] = importUtilities.calculateGraphPublicHash(result); const merkleRoot = importUtilities.calculateDatasetRootHash(result); result.datasetHeader.dataIntegrity.proofs[0].proofValue = merkleRoot; @@ -67,7 +70,10 @@ class WotOtJsonTranspiler { if (this.web3) { result = importUtilities.signDataset(result, this.config, this.web3); } else { - result = OtJsonUtilities.prepareDatasetForImport(result); + const sortedDataset = OtJsonUtilities.prepareDatasetForOldImport(result); + if (sortedDataset) { + result = sortedDataset; + } } return result; } diff --git a/modules/worker/export-worker.js b/modules/worker/export-worker.js index e5bab5f79d..7bfd44cd92 100644 --- a/modules/worker/export-worker.js +++ b/modules/worker/export-worker.js @@ -34,11 +34,14 @@ process.on('message', async (data) => { document.datasetHeader = metadata.datasetHeader; document.signature = metadata.signature; - // todo add otJsonService + const sortedDataset = OtJsonUtilities.prepareDatasetForNewExport(document); + if (sortedDataset) { + document = sortedDataset; + } } const web3 = new Web3(new Web3.providers.HttpProvider(config.blockchain.rpc_server_url)); - OtJsonUtilities.prepareDatasetForExtractSigner(document); + const dc_node_wallet = ImportUtilities.extractDatasetSigner(document, web3); const data_creator = document.datasetHeader.dataCreator; @@ -55,7 +58,11 @@ process.on('message', async (data) => { break; } case 'ot-json': { - dataset = JSON.stringify(OtJsonUtilities.prepareDatasetForExport(document)); + let sortedDataset = OtJsonUtilities.prepareDatasetForOldExport(document); + if (!sortedDataset) { + sortedDataset = document; + } + dataset = JSON.stringify(sortedDataset); break; } default: diff --git a/modules/worker/import-worker-controller.js b/modules/worker/import-worker-controller.js index 2c2e50c437..ba31fad720 100644 --- a/modules/worker/import-worker-controller.js +++ b/modules/worker/import-worker-controller.js @@ -35,8 +35,6 @@ class ImportWorkerController { const otjson_size_in_bytes = bytes(document); 
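// --- Review sketch (annotation, not part of the patch) ----------------------
// The explicit prepareDatasetForExtractSigner() call removed below is no
// longer needed: signDataset() and extractDatasetSigner() now sort a copy of
// the dataset internally, so signing and recovery stay consistent without
// mutating the caller's object. A minimal round trip, assuming a configured
// web3 instance and node keys:
//
//     const signed = ImportUtilities.signDataset(dataset, config, web3);
//     const signer = ImportUtilities.extractDatasetSigner(signed, web3);
//     // signer.toLowerCase() === config.node_wallet.toLowerCase()
// -----------------------------------------------------------------------------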
document = JSON.parse(document); // Extract wallet from signature. - - OtJsonUtilities.prepareDatasetForExtractSigner(document); const wallet = ImportUtilities.extractDatasetSigner( document, this.web3, diff --git a/test/bdd/steps/datalayer.js b/test/bdd/steps/datalayer.js index cf7c4f66ab..884671ac37 100644 --- a/test/bdd/steps/datalayer.js +++ b/test/bdd/steps/datalayer.js @@ -328,13 +328,8 @@ Then(/^I calculate and validate the proof of the last traversal/, { timeout: 120 }); for (const proofData of proofResponse) { - const { otObject } = lastTrail.find((element) => { - const { object_id } = proofData; - return element.otObject['@id'] === object_id; - }); - - const { proof, object_index } = proofData; - const objectText = Utilities.sortedStringify(otObject); + const { proof, object_index, otObject } = proofData; + const objectText = JSON.stringify(otObject); const merkleTree = new MerkleTree(['1', '1', '1', '1', '1', '1', '1', '1', '1', '1'], 'distribution', 'sha3'); const rootHash = merkleTree.calculateProofResult(proof, objectText, object_index); diff --git a/test/bdd/steps/lib/utilities.js b/test/bdd/steps/lib/utilities.js index 129b667b43..df61310bbb 100644 --- a/test/bdd/steps/lib/utilities.js +++ b/test/bdd/steps/lib/utilities.js @@ -1,14 +1,8 @@ /* eslint-disable max-len */ -const sortedStringify = require('sorted-json-stringify'); -const { sha3_256 } = require('js-sha3'); const _ = require('lodash'); const BN = require('bn.js'); -const Web3 = require('web3'); const fs = require('fs'); -// TODO: use 3rd party. -const MerkleTree = require('../../../../modules/Merkle'); - // Private functions. function _sortedStringify(obj, sortArrays = false) { @@ -36,58 +30,6 @@ function _sortedStringify(obj, sortArrays = false) { return JSON.stringify(obj); } -/** - * - * @param graph - * @return {string|*|undefined} - * @private - */ -function _sortGraphRecursively(graph) { - graph.forEach((el) => { - if (el.relations) { - el.relations.sort((r1, r2) => - sha3_256(_sortedStringify(r1)).localeCompare(sha3_256(_sortedStringify(r2)))); - } - - if (el.identifiers) { - el.identifiers.sort((r1, r2) => - sha3_256(_sortedStringify(r1)).localeCompare(sha3_256(_sortedStringify(r2)))); - } - }); - graph.sort((e1, e2) => (Object.keys(e1['@id']).length > 0 ? e1['@id'].localeCompare(e2['@id']) : 0)); - return _sortedStringify(graph); -} - -function _sortDataset(dataset) { - dataset['@graph'].forEach((el) => { - if (el.relations) { - el.relations.sort((r1, r2) => sha3_256(_sortedStringify(r1)) - .localeCompare(sha3_256(_sortedStringify(r2)))); - } - - if (el.identifiers) { - el.identifiers.sort((r1, r2) => sha3_256(_sortedStringify(r1)) - .localeCompare(sha3_256(_sortedStringify(r2)))); - } - }); - dataset['@graph'].sort((e1, e2) => e1['@id'].localeCompare(e2['@id'])); - return _sortedStringify(dataset); -} - -function _generateDatasetSummary(dataset) { - return { - datasetId: dataset['@id'], - datasetCreator: dataset.datasetHeader.dataCreator, - objects: dataset['@graph'].map(vertex => ({ - '@id': vertex['@id'], - identifiers: vertex.identifiers != null ? vertex.identifiers : [], - })), - numRelations: dataset['@graph'] - .filter(vertex => vertex.relations != null) - .reduce((acc, value) => acc + value.relations.length, 0), - }; -} - // Public functions. /** @@ -103,15 +45,6 @@ function base64Encode(file) { return Buffer.from(bitmap).toString('base64'); } -/** - * Calculate dataset ID from a given graph. 
- * @param graph - * @return {string} - */ -function calculateImportHash(graph) { - const sorted = _sortGraphRecursively(graph); - return `0x${sha3_256(sorted, null, 0)}`; -} /** * Normalizes hex number @@ -172,65 +105,6 @@ function isZeroHash(hash) { return num.eqn(0); } -function verifySignature(otJson, wallet) { - const { signature } = otJson; - const { accounts } = new Web3().eth; - const strippedOtjson = Object.assign({}, otJson); - delete strippedOtjson.signature; - - const stringifiedOtJson = _sortDataset(strippedOtjson); - return (wallet.toLowerCase() === accounts.recover(stringifiedOtJson, signature.value).toLowerCase()); -} - -/** - * Calculate root-hash of OT-JSON document - * @return {string} - * @param otJson - */ -function calculateRootHash(otJson) { - if (otJson == null) { - throw Error('Invalid OT JSON'); - } - - const { datasetHeader } = otJson; - if (datasetHeader == null) { - throw Error('Invalid OT JSON'); - } - - const graph = otJson['@graph']; - - if (!Array.isArray(graph)) { - throw Error('Invalid graph'); - } - if (graph.filter(v => v['@id'] == null).length > 0) { - throw Error('Invalid graph'); - } - - const datasetSummary = _generateDatasetSummary(otJson); - - graph.forEach((el) => { - if (el.relations) { - el.relations.sort((r1, r2) => sha3_256(_sortedStringify(r1)) - .localeCompare(sha3_256(_sortedStringify(r2)))); - } - - if (el.identifiers) { - el.identifiers.sort((r1, r2) => sha3_256(_sortedStringify(r1)) - .localeCompare(sha3_256(_sortedStringify(r2)))); - } - }); - - const stringifiedGraph = []; - graph.forEach(obj => stringifiedGraph.push(_sortedStringify(obj))); - - const merkle = new MerkleTree( - [_sortedStringify(datasetSummary), ...stringifiedGraph], - 'distribution', - 'sha3', - ); - - return merkle.getRoot(); -} /** * Is leaf node in the original JSON document @@ -272,14 +146,11 @@ function stringifyWithoutComments(obj) { } module.exports = { - calculateImportHash, normalizeHex, denormalizeHex, findVertexIdValue, findVertexUid, isZeroHash, - verifySignature, - calculateRootHash, base64_encode: base64Encode, stringifyWithoutComments, }; diff --git a/test/bdd/steps/network.js b/test/bdd/steps/network.js index 58ac7d4bb3..997598d659 100644 --- a/test/bdd/steps/network.js +++ b/test/bdd/steps/network.js @@ -22,6 +22,9 @@ const Models = require('../../../models'); const fs = require('fs'); const xmljs = require('xml-js'); +const Web3 = require('web3'); + + // Identity difficulty 8. 
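// --- Review sketch (annotation, not part of the patch) ----------------------
// getMerkleProofs() now returns the otObject together with each proof, so the
// trail-proof step in test/bdd/steps/datalayer.js above can validate an object
// directly; object_index is offset by one because leaf 0 of the distribution
// Merkle tree is the dataset summary:
//
//     const { proof, object_index, otObject } = proofData;
//     const rootHash = merkleTree.calculateProofResult(
//         proof, JSON.stringify(otObject), object_index);
// -----------------------------------------------------------------------------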
 const bootstrapIdentity = {
     ff62cb1f692431d901833d55b93c7d991b4087f1: {
@@ -302,7 +305,8 @@ Then(/^([DC|DV]+)'s last [import|purchase]+'s hash should be the same as one man
         expect(response.document, 'response.document should be in OT JSON format')
             .to.have.keys(['datasetHeader', '@id', '@type', '@graph', 'signature']);
 
-        expect(utilities.verifySignature(response.document, myNode.options.nodeConfiguration.node_wallet), 'Signature not valid!').to.be.true;
+
+        expect(ImportUtilities.extractDatasetSigner(response.document, new Web3()).toLowerCase() === myNode.options.nodeConfiguration.node_wallet.toLowerCase(), 'Signature not valid!').to.be.true;
 
         const calculatedRootHash = ImportUtilities.calculateDatasetRootHash(response.document);
         const calculateDatasetId = ImportUtilities.calculateGraphPublicHash(response.document);
@@ -331,7 +335,7 @@ Then(/^the last exported dataset signature should belong to ([DC|DV]+)$/, async
         expect(lastExport.data.formatted_dataset, 'response.data.formatted_dataset should be in OT JSON format')
             .to.have.keys(['datasetHeader', '@id', '@type', '@graph', 'signature']);
 
-        expect(utilities.verifySignature(lastExport.data.formatted_dataset, myNode.options.nodeConfiguration.node_wallet), 'Signature not valid!').to.be.true;
+        expect(ImportUtilities.extractDatasetSigner(lastExport.data.formatted_dataset, new Web3()).toLowerCase() === myNode.options.nodeConfiguration.node_wallet.toLowerCase(), 'Signature not valid!').to.be.true;
 });
 
 Then(/^the last exported dataset should contain "([^"]*)" data as "([^"]*)"$/, async function (filePath, dataId) {
diff --git a/test/modules/gs1-importer.test.js b/test/modules/gs1-importer.test.js
index c04eae2af3..34e7bf8025 100644
--- a/test/modules/gs1-importer.test.js
+++ b/test/modules/gs1-importer.test.js
@@ -250,7 +250,6 @@ describe('GS1 Importer tests', () => {
                 const otJsonFromDb = await importService.getImport(data_set_id);
                 assert.isNotNull(otJsonFromDb, 'DB result is null');
-                assert.deepEqual(otJson, otJsonFromDb);
 
                 const sortedFirst = ImportUtilities.sortStringifyDataset(otJson);
                 const sortedSecond = ImportUtilities.sortStringifyDataset(otJsonFromDb);
diff --git a/test/modules/import-utilities.test.js b/test/modules/import-utilities.test.js
index 440be91bae..48f11ffaf5 100644
--- a/test/modules/import-utilities.test.js
+++ b/test/modules/import-utilities.test.js
@@ -194,9 +194,7 @@ describe('Import utilities module ', () => {
             ImportUtilities.sortStringifyDataset(signedShuffled),
         );
 
-        OtJsonUtilities.prepareDatasetForExtractSigner(signedOriginal);
         const signerOfOriginal = await ImportUtilities.extractDatasetSigner(signedOriginal, web3);
-        OtJsonUtilities.prepareDatasetForExtractSigner(signedShuffled);
         const signerOfShuffled = await ImportUtilities.extractDatasetSigner(signedShuffled, web3);
 
         assert.equal(signerOfOriginal, signerOfShuffled);
diff --git a/test/modules/otjson-utilities.test.js b/test/modules/otjson-utilities.test.js
new file mode 100644
index 0000000000..8fc82372dc
--- /dev/null
+++ b/test/modules/otjson-utilities.test.js
@@ -0,0 +1,81 @@
+const { describe, before, it } = require('mocha');
+const { assert, expect } = require('chai');
+const OtJsonUtilities = require('../../modules/OtJsonUtilities');
+const Utilities = require('../../modules/Utilities');
+
+describe('OtJson Utilities module', () => {
+    it('Sort object recursively', () => {
+        const properties = {
+            c: 1,
+            b: 'abc',
+            a: {
+                y: [3, 2, {
+                    c: null, b: [3, 2, 1], a: undefined, s: 3,
+                }, 1, 0],
+                x: [9, 8, 7, 6, { x: 1, y: 2 }, 5, [6, 5, 4, 3, 1]],
+            },
+        };
+        const object = {
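// Review note (annotation, not part of the patch): per the expected output
// below, keys are sorted alphabetically at every depth, array element order is
// preserved, and `undefined` values come out as null after the stringify/parse
// round trip.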
+ a: 1, + b: 'abc', + c: { d: [1, 2, 3, { e: null, x: undefined, properties }, { y: 1, f: 2 }] }, + }; + + const sortedObject = { + a: 1, + b: 'abc', + c: { + d: [1, 2, 3, { + e: null, + properties: { + a: { + x: [ + 9, + 8, + 7, + 6, + { + x: 1, + y: 2, + }, + 5, + [ + 6, + 5, + 4, + 3, + 1, + ], + ], + y: [ + 3, + 2, + { + a: null, + b: [ + 3, + 2, + 1, + ], + c: null, + s: 3, + }, + 1, + 0, + ], + }, + b: 'abc', + c: 1, + }, + x: null, + }, + { + f: 2, + y: 1, + }, + ], + }, + }; + assert.deepEqual(sortedObject, JSON.parse(Utilities.sortObjectRecursively(object))); + }); +}); From 8f01830629cb4d83491481b7b77820e8d7622d0c Mon Sep 17 00:00:00 2001 From: kotlarmilos Date: Wed, 20 May 2020 16:23:58 +0200 Subject: [PATCH 2/4] kademlia-encrypted-routing-protocol (#1260) --- modules/network/kademlia/kademlia.js | 51 ++++++++++++++-------------- 1 file changed, 25 insertions(+), 26 deletions(-) diff --git a/modules/network/kademlia/kademlia.js b/modules/network/kademlia/kademlia.js index 38d29ae6fe..419a6451ab 100644 --- a/modules/network/kademlia/kademlia.js +++ b/modules/network/kademlia/kademlia.js @@ -339,33 +339,32 @@ class Kademlia { node.packMessage = async (contactId, message) => { // eslint-disable-next-line prefer-const let { contact, header } = await node.getContact(contactId); - // eslint-disable-next-line prefer-const let body = message; - // if (contact[0] !== contactId) { - // let publicKey = await this.networkService.getNodePublicKey(contactId); - // if (!publicKey) { - // try { - // const publicKeyData = await node.sendPublicKeyRequest( - // null, - // contactId, - // ); - // - // if (await this.networkService.validatePublicKeyData(publicKeyData)) { - // await this.networkService.setNodePublicKey(publicKeyData); - // } else { - // throw new Error('Public key validation error'); - // } - // publicKey = Buffer.from(publicKeyData.public_key, 'hex') - // .toString('hex'); - // } catch (e) { - // throw Error('Unable to get node public key for encryption'); - // } - // } - // body = await ECEncryption.encryptObject(message, publicKey); - // const messageHeader = JSON.parse(header); - // messageHeader.encrypted = true; - // header = JSON.stringify(messageHeader); - // } + if (contact[0] !== contactId) { + let publicKey = await this.networkService.getNodePublicKey(contactId); + if (!publicKey) { + try { + const publicKeyData = await node.sendPublicKeyRequest( + null, + contactId, + ); + + if (await this.networkService.validatePublicKeyData(publicKeyData)) { + await this.networkService.setNodePublicKey(publicKeyData); + } else { + throw new Error('Public key validation error'); + } + publicKey = Buffer.from(publicKeyData.public_key, 'hex') + .toString('hex'); + } catch (e) { + throw Error('Unable to get node public key for encryption'); + } + } + body = await ECEncryption.encryptObject(message, publicKey); + const messageHeader = JSON.parse(header); + messageHeader.encrypted = true; + header = JSON.stringify(messageHeader); + } return { contact, header, body }; }; From 8e927d3baff77b252a9b1267dfa703734e5b96e1 Mon Sep 17 00:00:00 2001 From: kotlarmilos Date: Wed, 20 May 2020 16:24:18 +0200 Subject: [PATCH 3/4] Handle network query issues (#1259) * network query issues * update sql query --- .../dh-read-data-location-request-command.js | 22 ++++++++++-- modules/controller/dv-controller.js | 34 +++++++++---------- test/bdd/features/datalayer.feature | 19 ++++++++++- test/bdd/steps/network.js | 10 +++++- 4 files changed, 64 insertions(+), 21 deletions(-) diff --git 
a/modules/command/dh/dh-read-data-location-request-command.js b/modules/command/dh/dh-read-data-location-request-command.js
index bf663d1369..42e381a9d5 100644
--- a/modules/command/dh/dh-read-data-location-request-command.js
+++ b/modules/command/dh/dh-read-data-location-request-command.js
@@ -44,7 +44,7 @@ class DHReadDataLocationRequestCommand extends Command {
 
         // Filter imports not stored in local DB.
         let imports = await Models.data_info.findAll({
-            attributes: ['data_set_id'],
+            attributes: ['data_set_id', 'data_provider_wallet'],
             where: {
                 data_set_id: {
                     [Op.in]: graphImports,
@@ -58,8 +58,26 @@ class DHReadDataLocationRequestCommand extends Command {
             return Command.empty();
         }
 
+        const validImports = [];
+        for (let i = 0; i < imports.length; i += 1) {
+            if (imports[i].data_provider_wallet.toLowerCase()
+                === this.config.node_wallet.toLowerCase()) {
+                // eslint-disable-next-line no-await-in-loop
+                const offer = await Models.offers.findOne({
+                    attributes: ['offer_id'],
+                    where: {
+                        data_set_id: imports[i].data_set_id,
+                    },
+                });
+
+                if (offer) { validImports.push(imports[i].data_set_id); }
+            } else {
+                validImports.push(imports[i].data_set_id);
+            }
+        }
+
         // Convert to string array.
-        imports = imports.map(i => i.data_set_id);
+        imports = validImports;
 
         // Check if the import came from network. In more details I can only
         // distribute data gotten from someone else.
diff --git a/modules/controller/dv-controller.js b/modules/controller/dv-controller.js
index dce2503326..0dc5567521 100644
--- a/modules/controller/dv-controller.js
+++ b/modules/controller/dv-controller.js
@@ -571,24 +571,24 @@ class DVController {
         }
 
         if (networkQuery.status !== 'OPEN') {
-            throw Error('Too late. Query closed.');
+            this.logger.info('Too late. Query closed.');
+        } else {
+            await this.commandExecutor.add({
+                name: 'dvDataLocationResponseCommand',
+                delay: 0,
+                data: {
+                    queryId,
+                    wallet: message.wallet,
+                    nodeId: message.nodeId,
+                    imports: message.imports,
+                    dataPrice: message.dataPrice,
+                    dataSize: message.dataSize,
+                    stakeFactor: message.stakeFactor,
+                    replyId: message.replyId,
+                },
+                transactional: false,
+            });
         }
-
-        await this.commandExecutor.add({
-            name: 'dvDataLocationResponseCommand',
-            delay: 0,
-            data: {
-                queryId,
-                wallet: message.wallet,
-                nodeId: message.nodeId,
-                imports: message.imports,
-                dataPrice: message.dataPrice,
-                dataSize: message.dataSize,
-                stakeFactor: message.stakeFactor,
-                replyId: message.replyId,
-            },
-            transactional: false,
-        });
     }
 
     async handleDataReadResponseFree(message) {
diff --git a/test/bdd/features/datalayer.feature b/test/bdd/features/datalayer.feature
index cc871337c6..e4cce4f707 100644
--- a/test/bdd/features/datalayer.feature
+++ b/test/bdd/features/datalayer.feature
@@ -219,4 +219,21 @@ Feature: Data layer related features
     And I wait for replications to finish
     Then DC should send a challenge request
     Then DH should send the challenge response
-    Then DC should verify the response
\ No newline at end of file
+    Then DC should verify the response
+
+
+  @second
+  Scenario: Node should not respond to a network query if it didn't replicate the dataset itself
+    Given the replication difficulty is 0
+    And I setup 4 nodes
+    And I start the nodes
+    And I use 1st node as DC
+    And DC imports "importers/xml_examples/Retail/01_Green_to_pink_shipment.xml" as GS1-EPCIS
+    And DC waits for import to finish
+    And I use 2nd node as DV
+    Given DV publishes query consisting of path: "identifiers.id", value: "urn:epc:id:sgtin:Batch_1" and opcode: "EQ" to the network
+    Then Answer for the last network query
by DV should be empty + Given DC initiates the replication for last imported dataset + And I wait for replications to finish + Given DV publishes query consisting of path: "identifiers.id", value: "urn:epc:id:sgtin:Batch_1" and opcode: "EQ" to the network + Then all nodes with last import should answer to last network query by DV \ No newline at end of file diff --git a/test/bdd/steps/network.js b/test/bdd/steps/network.js index 997598d659..03e76e1c9e 100644 --- a/test/bdd/steps/network.js +++ b/test/bdd/steps/network.js @@ -793,6 +793,14 @@ Given(/^I start additional node[s]*$/, { timeout: 5 * 60000 }, function () { return Promise.all(additionalNodesStarts); }); +Then(/^Answer for the last network query by ([DV|DV2]+) should be empty$/, { timeout: 90000 }, function (whichDV) { + expect(this.state.lastQueryNetworkId, 'Query not published yet.').to.not.be.undefined; + + const queryId = this.state.lastQueryNetworkId; + expect(!this.state.dataLocationQueriesConfirmations); +}); + + Then(/^all nodes with (last import|second last import) should answer to last network query by ([DV|DV2]+)$/, { timeout: 90000 }, async function (whichImport, whichDV) { expect(whichImport, 'last import or second last import are allowed values').to.be.oneOf(['last import', 'second last import']); whichImport = (whichImport === 'last import') ? 'lastImport' : 'secondLastImport'; @@ -808,7 +816,7 @@ Then(/^all nodes with (last import|second last import) should answer to last net this.state.nodes.forEach((node) => { promises.push(new Promise(async (accept) => { const body = await httpApiHelper.apiGetDatasetInfo(node.state.node_rpc_url, this.state[whichImport].data.dataset_id); - if (body.dataset_id === this.state[whichImport].data.dataset_id) { + if (body.dataset_id === this.state[whichImport].data.dataset_id && dv.state.identity !== node.state.identity) { nodeCandidates.push(node.state.identity); } accept(); From a2d9593a62e84a78e59a4774b15461ab3e818ef6 Mon Sep 17 00:00:00 2001 From: djordjekovac Date: Wed, 20 May 2020 16:24:34 +0200 Subject: [PATCH 4/4] Added support for permissioned data in gs1 epcis transpiler (#1258) * Added support for permissioned data in gs1 epcis transpiler * Added unit test for gs1 transpiler --- .../permissioned_data_simple_sample.xml | 40 +++++++++++++++ modules/constants.js | 14 ++++++ .../epcis/epcis-otjson-transpiler.js | 49 +++++++++++++++++++ package.json | 2 +- test/modules/epcis-otjson-transpiler.test.js | 25 ++++++++++ 5 files changed, 129 insertions(+), 1 deletion(-) create mode 100644 importers/use_cases/marketplace/permissioned_data_simple_sample.xml diff --git a/importers/use_cases/marketplace/permissioned_data_simple_sample.xml b/importers/use_cases/marketplace/permissioned_data_simple_sample.xml new file mode 100644 index 0000000000..120d4e5d3b --- /dev/null +++ b/importers/use_cases/marketplace/permissioned_data_simple_sample.xml @@ -0,0 +1,40 @@ + + + + + 1.2 + + p:Identifier + + + p:Identifier + + + EPCglobal + 1.2 + p:InstanceIdentifier + MasterData + true + 2018-01-01T00:31:52Z + + + + + + + + + Green + 0xBbAaAd7BD40602B78C0649032D2532dEFa23A4C0 + Company producer + + + + + + + + + + + \ No newline at end of file diff --git a/modules/constants.js b/modules/constants.js index 1f6ed2a3ba..819c4d209b 100644 --- a/modules/constants.js +++ b/modules/constants.js @@ -117,3 +117,17 @@ exports.PROCESS_NAME = { challengesHandling: 'challenges-handling', litigationHandling: 'litigation-handling', }; + +/** + * + * @constant {string} PERMISSIONED_DATA_VISIBILITY_SHOW_ATTRIBUTE - + * 
visibility option for storing only attribute value to permissioned data + */ +exports.PERMISSIONED_DATA_VISIBILITY_SHOW_ATTRIBUTE = 'permissioned.show_attribute'; + +/** + * + * @constant {string} PERMISSIONED_DATA_VISIBILITY_HIDE_ATTRIBUTE - + * visibility option for storing attribute to permissioned data + */ +exports.PERMISSIONED_DATA_VISIBILITY_HIDE_ATTRIBUTE = 'permissioned.hide_attribute'; diff --git a/modules/transpiler/epcis/epcis-otjson-transpiler.js b/modules/transpiler/epcis/epcis-otjson-transpiler.js index 916109b2b0..83fcb4f3ba 100644 --- a/modules/transpiler/epcis/epcis-otjson-transpiler.js +++ b/modules/transpiler/epcis/epcis-otjson-transpiler.js @@ -4,6 +4,8 @@ const xsd = require('libxml-xsd'); const utilities = require('../../Utilities'); const importUtilities = require('../../ImportUtilities'); const OtJsonUtilities = require('../../OtJsonUtilities'); +const constants = require('../../constants'); + const fs = require('fs'); const deepExtend = require('deep-extend'); @@ -74,6 +76,7 @@ class EpcisOtJsonTranspiler { otjson['@id'] = ''; otjson['@type'] = 'Dataset'; otjson.datasetHeader = importUtilities.createDatasetHeader(this.config, transpilationInfo); + importUtilities.calculateGraphPermissionedDataHashes(otjson['@graph']); let result = OtJsonUtilities.prepareDatasetForNewImport(otjson); if (!result) { @@ -249,6 +252,33 @@ class EpcisOtJsonTranspiler { } } + // remove permissioned data from vocabularyElements + if (properties.___metadata.attribute && + Array.isArray(properties.___metadata.attribute)) { + for (let i = properties.___metadata.attribute.length - 1; i >= 0; i -= 1) { + const attribute = properties.___metadata.attribute[i]; + if (attribute._attributes.visibility && + attribute._attributes.visibility.startsWith('permissioned')) { + if (!properties.permissioned_data) { + properties.permissioned_data = { data: { attribute: [] } }; + } + properties.permissioned_data.data.attribute + .push(utilities.copyObject(attribute)); + if (attribute._attributes.visibility + === constants.PERMISSIONED_DATA_VISIBILITY_HIDE_ATTRIBUTE) { + // in this case we want to hide whole attribute + properties.___metadata.attribute.splice(i, 1); + delete properties[attribute._attributes.id]; + } else if (attribute._attributes.visibility + === constants.PERMISSIONED_DATA_VISIBILITY_SHOW_ATTRIBUTE) { + // in this case we want to hide attribute value + attribute._text = ''; + properties[attribute._attributes.id] = ''; + } + } + } + } + const otVocabulary = { '@id': vocabularyElement._attributes.id, '@type': 'otObject', @@ -303,6 +333,25 @@ class EpcisOtJsonTranspiler { continue; } + if (properties.permissioned_data && + properties.permissioned_data.data && + properties.permissioned_data.data.attribute && + Array.isArray(properties.permissioned_data.data.attribute)) { + properties.permissioned_data.data.attribute.forEach((attribute) => { + if (attribute._attributes.visibility && attribute._attributes.visibility + === constants.PERMISSIONED_DATA_VISIBILITY_SHOW_ATTRIBUTE) { + const element = properties.___metadata.attribute + .find(element => element._attributes.id === attribute._attributes.id); + element._text = attribute._text; + properties[attribute._attributes.id] = attribute._text; + } else if (attribute._attributes.visibility && attribute._attributes.visibility + === constants.PERMISSIONED_DATA_VISIBILITY_HIDE_ATTRIBUTE) { + properties.___metadata.attribute.push(attribute); + properties[attribute._attributes.id] = attribute._text; + } + }); + } + delete properties.objectType; const type = 
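// --- Review sketch (annotation, not part of the patch) ----------------------
// The block above is the reverse path: when converting OT-JSON back to XML,
// values stored under properties.permissioned_data are re-inlined so the
// exported document matches the original, e.g.:
//
//     properties[attribute._attributes.id] = attribute._text;
//
// show_attribute values are written back into their blanked metadata entries,
// and hide_attribute entries are re-appended in full.
// -----------------------------------------------------------------------------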
properties.vocabularyType; delete properties.vocabularyType; diff --git a/package.json b/package.json index 06d2e141dc..dd786abd33 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "origintrail_node", - "version": "4.1.1", + "version": "4.1.2", "description": "OriginTrail node", "main": ".eslintrc.js", "config": { diff --git a/test/modules/epcis-otjson-transpiler.test.js b/test/modules/epcis-otjson-transpiler.test.js index bb4536c806..d8664f4f15 100644 --- a/test/modules/epcis-otjson-transpiler.test.js +++ b/test/modules/epcis-otjson-transpiler.test.js @@ -38,7 +38,9 @@ describe('EPCIS OT JSON transpiler tests', () => { let selectedDatabase; const directoryPath = path.join(__dirname, '../../importers/epcis_12_examples/'); + const permissionedDataDirectoryPath = path.join(__dirname, '../../importers/use_cases/marketplace/'); const inputXmlFiles = fs.readdirSync(directoryPath).map(file => path.join(__dirname, `../../importers/epcis_12_examples/${file}`)); + const inputPermissionedDataFile = fs.readFileSync(`${permissionedDataDirectoryPath}permissioned_data_simple_sample.xml`); before('Init EPCIS transpiler', async () => { const config = rc(pjson.name, defaultConfig); @@ -115,6 +117,29 @@ describe('EPCIS OT JSON transpiler tests', () => { }); }); + describe('Convert XML with permissioned data into OT-JSON and back', () => { + it( + 'should correctly transpile permissioned data xml into OT-JSON and back', + // eslint-disable-next-line no-loop-func + async () => { + const xmlContents = inputPermissionedDataFile.toString(); + const otJson = transpiler.convertToOTJson(xmlContents); + + const attributes = otJson['@graph'][0].properties.___metadata.attribute; + assert.equal(attributes[0]._text, ''); + assert.equal(attributes.length, 2); + const permissionedDataAttributes = otJson['@graph'][0].properties.permissioned_data.data.attribute; + assert.equal(permissionedDataAttributes[0]._text, 'Company producer'); + assert.equal(permissionedDataAttributes[1]._attributes.id, 'urn:ot:object:actor:name'); + assert.equal(permissionedDataAttributes[1]._text, 'Green'); + + const exportedXml = transpiler.convertFromOTJson(otJson); + + assert.equal(xmlContents.trim(), exportedXml.trim()); + }, + ); + }); + describe('Convert empty XML into OT-JSON', () => { it('should fail on empty XML document', async () => { expect(transpiler.convertToOTJson.bind(transpiler, null)).to.throw('XML document cannot be empty');
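// --- Review summary (annotation, not part of the patch series) ---------------
// The two visibility options added in modules/constants.js behave as follows
// when the EPCIS transpiler meets an attribute with a visibility flag:
//
//   permissioned.show_attribute - the attribute stays in the public graph with
//       an empty value; the value itself moves to properties.permissioned_data
//   permissioned.hide_attribute - the whole attribute is removed from the
//       public graph and is stored only in properties.permissioned_data
//
// From the new unit test in test/modules/epcis-otjson-transpiler.test.js:
//
//     const otJson = transpiler.convertToOTJson(xmlContents);
//     const attributes = otJson['@graph'][0].properties.___metadata.attribute;
//     assert.equal(attributes[0]._text, ''); // shown attribute, value hidden
//     const hidden = otJson['@graph'][0].properties.permissioned_data.data.attribute;
//     assert.equal(hidden[1]._text, 'Green'); // value kept privately
// -----------------------------------------------------------------------------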