Commit

Merge pull request #1264 from OriginTrail/prerelease/mainnet
OriginTrail Mainnet Release v4.1.2
djordjekovac authored May 27, 2020
2 parents 5172831 + 8b1364c commit b60aca5
Showing 27 changed files with 554 additions and 269 deletions.
@@ -0,0 +1,40 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<epcis:EPCISDocument xmlns:epcis="urn:epcglobal:epcis:xsd:1" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:p="http://www.unece.org/cefact/namespaces/StandardBusinessDocumentHeader" schemaVersion="0" creationDate="2001-12-17T09:30:47Z" xsi:schemaLocation="urn:epcglobal:epcis:xsd:1 http://www.gs1si.org/BMS/epcis/1_2/EPCglobal-epcis-1_2.xsd">
+    <EPCISHeader>
+        <p:StandardBusinessDocumentHeader>
+            <p:HeaderVersion>1.2</p:HeaderVersion>
+            <p:Sender>
+                <p:Identifier>p:Identifier</p:Identifier>
+            </p:Sender>
+            <p:Receiver>
+                <p:Identifier>p:Identifier</p:Identifier>
+            </p:Receiver>
+            <p:DocumentIdentification>
+                <p:Standard>EPCglobal</p:Standard>
+                <p:TypeVersion>1.2</p:TypeVersion>
+                <p:InstanceIdentifier>p:InstanceIdentifier</p:InstanceIdentifier>
+                <p:Type>MasterData</p:Type>
+                <p:MultipleType>true</p:MultipleType>
+                <p:CreationDateAndTime>2018-01-01T00:31:52Z</p:CreationDateAndTime>
+            </p:DocumentIdentification>
+        </p:StandardBusinessDocumentHeader>
+        <extension>
+            <EPCISMasterData>
+                <VocabularyList>
+                    <Vocabulary type="urn:ot:object:actor">
+                        <VocabularyElementList>
+                            <VocabularyElement id="urn:ot:object:actor:id:Company_Green">
+                                <attribute id="urn:ot:object:actor:name" visibility="permissioned.show_attribute">Green</attribute>
+                                <attribute id="urn:ot:object:actor:wallet">0xBbAaAd7BD40602B78C0649032D2532dEFa23A4C0</attribute>
+                                <attribute id="urn:ot:object:actor:category" visibility="permissioned.hide_attribute">Company producer</attribute>
+                            </VocabularyElement>
+                        </VocabularyElementList>
+                    </Vocabulary>
+                </VocabularyList>
+            </EPCISMasterData>
+        </extension>
+    </EPCISHeader>
+    <EPCISBody>
+        <EventList/>
+    </EPCISBody>
+</epcis:EPCISDocument>
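
Editor's note: the visibility="permissioned.show_attribute" and visibility="permissioned.hide_attribute" markers in this new test fixture exercise the permissioned-data handling that the ImportUtilities.js changes below rely on: calculateGraphPermissionedDataHashes and removeGraphPermissionedData are called so that protected attribute values are reduced to hashes or stripped before any public hash or signature is computed. A purely illustrative sketch of that stripping step, assuming a hypothetical permissioned_data property on graph objects (the exact OT-JSON layout is not part of this diff):

    // Purely illustrative, not the repository implementation: remove permissioned
    // payloads from graph objects before a public hash or signature is computed.
    // The `permissioned_data` property name is an assumption made for this sketch.
    function removeGraphPermissionedData(graph) {
        for (const otObject of graph) {
            if (otObject.properties && otObject.properties.permissioned_data) {
                delete otObject.properties.permissioned_data.data;
            }
        }
    }

    const exampleGraph = [{
        '@id': 'urn:ot:object:actor:id:Company_Green',
        properties: { permissioned_data: { data: { category: 'Company producer' } } },
    }];
    removeGraphPermissionedData(exampleGraph);
    console.log(JSON.stringify(exampleGraph)); // protected payload removed, envelope kept
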
49 changes: 31 additions & 18 deletions modules/ImportUtilities.js
@@ -140,8 +140,10 @@ class ImportUtilities {
     }
 
     static prepareDataset(originalDocument, config, web3) {
-        const document = originalDocument; // todo add otJsonService
-        const graph = document['@graph'];
+        let document = OtJsonUtilities.prepareDatasetForNewImport(originalDocument);
+        if (!document) {
+            document = originalDocument;
+        }
         const datasetHeader = document.datasetHeader ? document.datasetHeader : {};
         ImportUtilities.calculateGraphPermissionedDataHashes(document['@graph']);
         const id = ImportUtilities.calculateGraphPublicHash(document);
@@ -157,7 +159,7 @@ class ImportUtilities {
             '@id': id,
             '@type': 'Dataset',
             datasetHeader: header,
-            '@graph': graph,
+            '@graph': document['@graph'],
         };
 
         const rootHash = ImportUtilities.calculateDatasetRootHash(dataset);
@@ -452,9 +454,11 @@ class ImportUtilities {
     }
 
     static calculateDatasetRootHash(dataset) {
-        const datasetClone = Utilities.copyObject(dataset);
-        ImportUtilities.removeGraphPermissionedData(datasetClone['@graph']);
-        const sortedDataset = OtJsonUtilities.prepareDatasetForGeneratingRootHash(datasetClone);
+        let sortedDataset = OtJsonUtilities.prepareDatasetForGeneratingRootHash(dataset);
+        if (!sortedDataset) {
+            sortedDataset = Utilities.copyObject(dataset);
+        }
+        ImportUtilities.removeGraphPermissionedData(sortedDataset['@graph']);
         const datasetId = sortedDataset['@id'];
         const datasetCreator = sortedDataset.datasetHeader.dataCreator;
 
@@ -590,7 +594,10 @@ class ImportUtilities {
      * @returns {string}
      */
     static calculateGraphPublicHash(dataset) {
-        const sortedDataset = OtJsonUtilities.prepareDatasetForGeneratingGraphHash(dataset);
+        let sortedDataset = OtJsonUtilities.prepareDatasetForGeneratingGraphHash(dataset);
+        if (!sortedDataset) {
+            sortedDataset = Utilities.copyObject(dataset);
+        }
         ImportUtilities.removeGraphPermissionedData(sortedDataset['@graph']);
         return `0x${sha3_256(JSON.stringify(sortedDataset['@graph']), null, 0)}`;
     }
@@ -627,32 +634,38 @@ class ImportUtilities {
     }
 
     /**
-     * Sign OT-JSON
+     * Sign dataset
      * @static
      */
-    static signDataset(otjson, config, web3) {
-        const sortedOTJson = OtJsonUtilities.prepareDatasetForGeneratingSignature(otjson);
-        ImportUtilities.removeGraphPermissionedData(sortedOTJson['@graph']);
+    static signDataset(dataset, config, web3) {
+        let sortedDataset = OtJsonUtilities.prepareDatasetForGeneratingSignature(dataset);
+        if (!sortedDataset) {
+            sortedDataset = Utilities.copyObject(dataset);
+        }
+        ImportUtilities.removeGraphPermissionedData(sortedDataset['@graph']);
         const { signature } = web3.eth.accounts.sign(
-            JSON.stringify(sortedOTJson),
+            JSON.stringify(sortedDataset),
             Utilities.normalizeHex(config.node_private_key),
         );
-        otjson.signature = {
+        dataset.signature = {
             value: signature,
             type: 'ethereum-signature',
         };
 
-        return otjson;
+        return dataset;
     }
 
     /**
      * Extract Signer from OT-JSON signature
      * @static
      */
-    static extractDatasetSigner(otjson, web3) {
-        const strippedOtjson = OtJsonUtilities.prepareDatasetForGeneratingSignature(otjson);
-        delete strippedOtjson.signature;
-        return web3.eth.accounts.recover(JSON.stringify(strippedOtjson), otjson.signature.value);
+    static extractDatasetSigner(dataset, web3) {
+        let sortedDataset = OtJsonUtilities.prepareDatasetForGeneratingSignature(dataset);
+        if (!sortedDataset) {
+            sortedDataset = Utilities.copyObject(dataset);
+        }
+        delete sortedDataset.signature;
+        return web3.eth.accounts.recover(JSON.stringify(sortedDataset), dataset.signature.value);
     }
 
 
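Editor's note: the recurring change in modules/ImportUtilities.js is that every hashing and signing helper now first asks OtJsonUtilities to prepare the dataset and falls back to a plain copy whenever the helper returns undefined, which signals that the dataset is already in canonical form. A minimal, self-contained sketch of that caller-side pattern, with an illustrative stub standing in for the real OtJsonUtilities helper:

    // Illustrative stub, not the repository implementation: returns a re-ordered copy
    // when the dataset still needs canonicalization, or undefined when it is already
    // in the expected form (the `alreadyCanonical` flag is hypothetical).
    function prepareDatasetForGeneratingRootHash(dataset) {
        if (dataset.alreadyCanonical) {
            return undefined;
        }
        return JSON.parse(JSON.stringify(dataset)); // deep copy stands in for the real sorting
    }

    // Caller-side pattern now used throughout ImportUtilities.js:
    function calculateDatasetRootHash(dataset) {
        let sortedDataset = prepareDatasetForGeneratingRootHash(dataset);
        if (!sortedDataset) {
            // Helper signalled "nothing to do": fall back to a plain copy so the
            // original input is never mutated by the steps that follow.
            sortedDataset = JSON.parse(JSON.stringify(dataset));
        }
        // ...permissioned-data removal and root-hash calculation continue here...
        return sortedDataset['@id'];
    }

The fallback keeps the original input unmutated while older (version 1.0) datasets still get the reordering they need before hashing.
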
117 changes: 93 additions & 24 deletions modules/OtJsonUtilities.js
@@ -59,12 +59,12 @@ class OtJsonUtilities {
         case '1.0':
             datasetCopy = Utilities.copyObject(dataset);
             datasetCopy['@graph'] = JSON.parse(Utilities.sortedStringify(datasetCopy['@graph'], true));
-            break;
+            return datasetCopy;
+        case '1.1':
+            return undefined;
         default:
             throw new Error('Unsupported ot-json version!');
         }
-
-        return datasetCopy;
     }
 
     /**
@@ -85,24 +85,33 @@ class OtJsonUtilities {
             datasetCopy = Utilities.copyObject(dataset);
             OtJsonUtilities.sortGraphRelationsAndIdentifiers(datasetCopy['@graph']);
             return datasetCopy;
+        case '1.1':
+            return undefined;
         default:
             throw new Error('Unsupported ot-json version!');
         }
     }
 
     /**
-     * Formats the dataset IN PLACE so that the signature can be generated properly
+     * Formats the dataset for export
      *
      * @param dataset
-     * @returns {any}|undefined
+     * @returns {any}|undefined - A formatted copy of the dataset, or undefined if the dataset is
+     * already formatted
      */
-    static prepareDatasetForExtractSigner(dataset) {
+    static prepareDatasetForDatabaseRead(dataset) {
         const version = OtJsonUtilities._getDatasetVersion(dataset);
 
+        let datasetCopy;
+
         switch (version) {
         case '1.0':
-            OtJsonUtilities.sortGraphRelationsAndIdentifiers(dataset['@graph']);
-            break;
+            datasetCopy = Utilities.copyObject(dataset);
+            OtJsonUtilities.sortGraphRelationsAndIdentifiers(datasetCopy['@graph']);
+            return datasetCopy;
+        case '1.1':
+            datasetCopy = Utilities.copyObject(dataset);
+            return JSON.parse(Utilities.sortObjectRecursively(datasetCopy));
         default:
             throw new Error('Unsupported ot-json version!');
         }
@@ -125,6 +134,8 @@ class OtJsonUtilities {
             datasetCopy = Utilities.copyObject(dataset);
             OtJsonUtilities.sortGraphRelationsAndIdentifiers(datasetCopy['@graph']);
             return JSON.parse(Utilities.sortedStringify(datasetCopy, false));
+        case '1.1':
+            return undefined;
         default:
             throw new Error('Unsupported ot-json version!');
         }
@@ -147,39 +158,44 @@ class OtJsonUtilities {
             datasetCopy = Utilities.copyObject(dataset);
             datasetCopy['@graph'] = JSON.parse(Utilities.sortedStringify(datasetCopy['@graph'], true));
             return datasetCopy;
+        case '1.1':
+            return undefined;
         default:
             throw new Error('Unsupported ot-json version!');
         }
     }
 
     /**
-     * Formats the dataset for proper generating of offer challenges
+     * Formats the dataset for proper generating of Merkle proofs for ot-objects
      * @param dataset
      * @returns {any}|undefined - A formatted copy of the dataset, or undefined if the
      * dataset it already formatted
      */
-    static prepareDatasetForGeneratingChallenges(dataset) {
+    static prepareDatasetForGeneratingMerkleProofs(dataset) {
         const version = OtJsonUtilities._getDatasetVersion(dataset);
 
         let datasetCopy;
 
         switch (version) {
         case '1.0':
             datasetCopy = Utilities.copyObject(dataset);
-            datasetCopy['@graph'] = JSON.parse(Utilities.sortedStringify(datasetCopy['@graph'], true));
+            OtJsonUtilities.sortGraphRelationsAndIdentifiers(datasetCopy['@graph']);
             return datasetCopy;
+        case '1.1':
+            return undefined;
         default:
             throw new Error('Unsupported ot-json version!');
         }
     }
 
     /**
-     * Formats the dataset for proper generating of Merkle proofs for ot-objects
+     * Formats the dataset so that the dataset can be read and imported by a Data Viewer
+     *
      * @param dataset
-     * @returns {any}|undefined - A formatted copy of the dataset, or undefined if the
-     * dataset it already formatted
+     * @returns {any}|undefined - A formatted copy of the dataset, or undefined if the dataset is
+     * already formatted
      */
-    static prepareDatasetForGeneratingMerkleProofs(dataset) {
+    static prepareDatasetForDataRead(dataset) {
         const version = OtJsonUtilities._getDatasetVersion(dataset);
 
         let datasetCopy;
@@ -188,20 +204,22 @@ class OtJsonUtilities {
         case '1.0':
             datasetCopy = Utilities.copyObject(dataset);
             OtJsonUtilities.sortGraphRelationsAndIdentifiers(datasetCopy['@graph']);
-            return datasetCopy;
+            return JSON.parse(Utilities.sortedStringify(datasetCopy, false));
+        case '1.1':
+            return undefined;
         default:
             throw new Error('Unsupported ot-json version!');
         }
     }
 
     /**
-     * Formats the dataset so that the dataset can be read and imported by a Data Viewer
+     * Formats the dataset so that the dataset can be imported to the graph database as old otJson
      *
      * @param dataset
      * @returns {any}|undefined - A formatted copy of the dataset, or undefined if the dataset is
      * already formatted
      */
-    static prepareDatasetForDataRead(dataset) {
+    static prepareDatasetForOldImport(dataset) {
         const version = OtJsonUtilities._getDatasetVersion(dataset);
 
         let datasetCopy;
@@ -211,41 +229,44 @@ class OtJsonUtilities {
             datasetCopy = Utilities.copyObject(dataset);
             OtJsonUtilities.sortGraphRelationsAndIdentifiers(datasetCopy['@graph']);
             return JSON.parse(Utilities.sortedStringify(datasetCopy, false));
+        case '1.1':
+            return undefined;
         default:
             throw new Error('Unsupported ot-json version!');
         }
     }
 
     /**
-     * Formats the dataset so that the dataset can be imported to the graph database
+     * Formats the dataset so that the dataset can be imported to the graph database as new otJson
      *
      * @param dataset
      * @returns {any}|undefined - A formatted copy of the dataset, or undefined if the dataset is
      * already formatted
      */
-    static prepareDatasetForImport(dataset) {
+    static prepareDatasetForNewImport(dataset) {
         const version = OtJsonUtilities._getDatasetVersion(dataset);
 
         let datasetCopy;
 
         switch (version) {
         case '1.0':
+            return undefined;
+        case '1.1':
             datasetCopy = Utilities.copyObject(dataset);
-            OtJsonUtilities.sortGraphRelationsAndIdentifiers(datasetCopy['@graph']);
-            return JSON.parse(Utilities.sortedStringify(datasetCopy, false));
+            return JSON.parse(Utilities.sortObjectRecursively(datasetCopy));
         default:
             throw new Error('Unsupported ot-json version!');
         }
     }
 
     /**
-     * Formats the dataset so that the dataset can be exported and validated in OT-JSON format
+     * Formats the dataset so that the dataset can be exported and validated in old OT-JSON format
      *
      * @param dataset
      * @returns {any}|undefined - A formatted copy of the dataset, or undefined if the dataset is
      * already formatted
      */
-    static prepareDatasetForExport(dataset) {
+    static prepareDatasetForOldExport(dataset) {
         const version = OtJsonUtilities._getDatasetVersion(dataset);
 
         let datasetCopy;
@@ -255,6 +276,54 @@ class OtJsonUtilities {
             datasetCopy = Utilities.copyObject(dataset);
             OtJsonUtilities.sortGraphRelationsAndIdentifiers(datasetCopy['@graph']);
             return datasetCopy;
+        case '1.1':
+            return undefined;
         default:
             throw new Error('Unsupported ot-json version!');
         }
+    }
+
+    /**
+     * Formats the dataset so that the dataset can be exported and validated in new OT-JSON format
+     *
+     * @param dataset
+     * @returns {any}|undefined - A formatted copy of the dataset, or undefined if the dataset is
+     * already formatted
+     */
+    static prepareDatasetForNewExport(dataset) {
+        const version = OtJsonUtilities._getDatasetVersion(dataset);
+
+        let datasetCopy;
+
+        switch (version) {
+        case '1.0':
+            return undefined;
+        case '1.1':
+            datasetCopy = Utilities.copyObject(dataset);
+            return JSON.parse(Utilities.sortObjectRecursively(datasetCopy));
+        default:
+            throw new Error('Unsupported ot-json version!');
+        }
+    }
+
+    /**
+     * Formats the dataset for replication import
+     *
+     * @param dataset
+     * @returns {any}|undefined - A formatted copy of the dataset, or undefined if the dataset is
+     * already formatted
+     */
+    static prepareDatasetForNewReplication(dataset) {
+        const version = OtJsonUtilities._getDatasetVersion(dataset);
+
+        let datasetCopy;
+
+        switch (version) {
+        case '1.0':
+            return undefined;
+        case '1.1':
+            datasetCopy = Utilities.copyObject(dataset);
+            return JSON.parse(Utilities.sortObjectRecursively(datasetCopy));
+        default:
+            throw new Error('Unsupported ot-json version!');
+        }
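
Editor's note: the new '1.1' branches in modules/OtJsonUtilities.js all reduce to JSON.parse(Utilities.sortObjectRecursively(datasetCopy)), i.e. a canonical, recursively key-sorted serialization of the whole dataset, whereas the version 1.0 branches keep the older graph-only sorting. Utilities.sortObjectRecursively itself is not part of this diff; the sketch below (a hypothetical stand-in) only illustrates what such a recursive key-sort canonicalization typically looks like.

    // Illustrative stand-in for a recursive canonicalizer such as
    // Utilities.sortObjectRecursively: serialize a value with all object keys
    // emitted in sorted order, so two semantically identical datasets produce
    // byte-identical strings (and therefore identical hashes and signatures).
    function sortObjectRecursively(value) {
        if (Array.isArray(value)) {
            return `[${value.map(sortObjectRecursively).join(',')}]`;
        }
        if (value !== null && typeof value === 'object') {
            const members = Object.keys(value).sort()
                .map(key => `${JSON.stringify(key)}:${sortObjectRecursively(value[key])}`);
            return `{${members.join(',')}}`;
        }
        return JSON.stringify(value);
    }

    // Both inputs canonicalize to the same string:
    console.log(sortObjectRecursively({ b: 1, a: { d: 2, c: 3 } }));
    console.log(sortObjectRecursively({ a: { c: 3, d: 2 }, b: 1 }));

Canonical ordering matters here because the root hash, the public graph hash and the Ethereum signature are all computed over JSON.stringify output, so two nodes must serialize the same dataset to byte-identical strings.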