From 36fee254c21c3e87436a5ee575883d257246a436 Mon Sep 17 00:00:00 2001 From: kotlarmilos Date: Mon, 1 Jun 2020 09:23:56 +0200 Subject: [PATCH 1/9] Enable OT-JSON Version 1.1 (#1265) * enable-otjson-version-1.1 --- modules/ImportUtilities.js | 2 +- modules/OtJsonUtilities.js | 2 +- modules/transpiler/epcis/epcis-otjson-transpiler.js | 2 +- test/modules/epcis-otjson-transpiler.test.js | 11 +++++++++-- 4 files changed, 12 insertions(+), 5 deletions(-) diff --git a/modules/ImportUtilities.js b/modules/ImportUtilities.js index c1a790641..5eafaeee6 100644 --- a/modules/ImportUtilities.js +++ b/modules/ImportUtilities.js @@ -673,7 +673,7 @@ class ImportUtilities { * Fill in dataset header * @private */ - static createDatasetHeader(config, transpilationInfo = null, datasetTags = [], datasetTitle = '', datasetDescription = '', OTJSONVersion = '1.0') { + static createDatasetHeader(config, transpilationInfo = null, datasetTags = [], datasetTitle = '', datasetDescription = '', OTJSONVersion = '1.1') { const header = { OTJSONVersion, datasetCreationTimestamp: new Date().toISOString(), diff --git a/modules/OtJsonUtilities.js b/modules/OtJsonUtilities.js index 691a44e25..41bc7dcf1 100644 --- a/modules/OtJsonUtilities.js +++ b/modules/OtJsonUtilities.js @@ -36,7 +36,7 @@ class OtJsonUtilities { static _getDatasetVersion(dataset) { if (!dataset || !dataset.datasetHeader || !dataset.datasetHeader.OTJSONVersion) { - return '1.0'; + return '1.1'; // throw new Error('Could not determine dataset ot-json version!'); } return dataset.datasetHeader.OTJSONVersion; diff --git a/modules/transpiler/epcis/epcis-otjson-transpiler.js b/modules/transpiler/epcis/epcis-otjson-transpiler.js index 83fcb4f3b..b0a58537a 100644 --- a/modules/transpiler/epcis/epcis-otjson-transpiler.js +++ b/modules/transpiler/epcis/epcis-otjson-transpiler.js @@ -1166,7 +1166,7 @@ class EpcisOtJsonTranspiler { return { transpilationInfo: { transpilerType: 'GS1-EPCIS', - transpilerVersion: '1.0', + transpilerVersion: '1.1', sourceMetadata: { created: created.toISOString(), modified: created.toISOString(), diff --git a/test/modules/epcis-otjson-transpiler.test.js b/test/modules/epcis-otjson-transpiler.test.js index d8664f4f1..911109d3f 100644 --- a/test/modules/epcis-otjson-transpiler.test.js +++ b/test/modules/epcis-otjson-transpiler.test.js @@ -123,6 +123,10 @@ describe('EPCIS OT JSON transpiler tests', () => { // eslint-disable-next-line no-loop-func async () => { const xmlContents = inputPermissionedDataFile.toString(); + const expectedJson = xml2js.xml2js(xmlContents, { + compact: true, + spaces: 4, + }); const otJson = transpiler.convertToOTJson(xmlContents); const attributes = otJson['@graph'][0].properties.___metadata.attribute; @@ -134,8 +138,11 @@ describe('EPCIS OT JSON transpiler tests', () => { assert.equal(permissionedDataAttributes[1]._text, 'Green'); const exportedXml = transpiler.convertFromOTJson(otJson); - - assert.equal(xmlContents.trim(), exportedXml.trim()); + const returnedJson = xml2js.xml2js(exportedXml, { + compact: true, + spaces: 4, + }); + assert.equal(Utilities.sortObjectRecursively(expectedJson), Utilities.sortObjectRecursively(returnedJson)); }, ); }); From 307da4c755a964857cbaa38d423fd5217796fced Mon Sep 17 00:00:00 2001 From: Uros Kukic <33048701+Kuki145@users.noreply.github.com> Date: Mon, 1 Jun 2020 09:26:00 +0200 Subject: [PATCH 2/9] Generate unique graph database password as default (#1266) * Add graph database password generating * Add genenrating data folder before generating password file * Add one-time 
password update migration script for ot-node * Run password migration for docker distributions only * Add password_file_name as a database configuration parameter * update unit tests and setup script * Enable supervisorctl to work inside docker * Remove the accidentally added testing.sh file * Fix timing issue for password update script and skip password update for non default passwords * Add error handling for Arango Server Password update migration * Fix some parts of Arango Password Update error handling * Wait for Arango to spin up before returning from the script Co-authored-by: Milos Kotlar --- config/config.json | 3 + .../migration/m5-arango-password-migration.js | 36 ++++++++++++ ot-node.js | 47 ++++++++++++++- scripts/backup.js | 2 +- scripts/setup.js | 27 ++++++--- scripts/update-arango-password.sh | 58 +++++++++++++++++++ test/modules/graph.test.js | 2 +- test/modules/graphstorage.test.js | 2 +- test/modules/utilities.test.js | 4 +- testnet/install-arango.sh | 29 ++++++---- testnet/supervisord.conf | 6 ++ 11 files changed, 193 insertions(+), 23 deletions(-) create mode 100644 modules/migration/m5-arango-password-migration.js create mode 100755 scripts/update-arango-password.sh diff --git a/config/config.json b/config/config.json index 305404aaf..b1457e7a0 100644 --- a/config/config.json +++ b/config/config.json @@ -41,6 +41,7 @@ "provider": "arangodb", "username": "root", "password": "root", + "password_file_name": "arango.txt", "port": 8529, "database": "origintrail-develop", "host": "localhost", @@ -152,6 +153,7 @@ "provider": "arangodb", "username": "root", "password": "root", + "password_file_name": "arango.txt", "port": 8529, "database": "origintrail", "host": "localhost", @@ -267,6 +269,7 @@ "provider": "arangodb", "username": "root", "password": "root", + "password_file_name": "arango.txt", "port": 8529, "database": "origintrail", "host": "localhost", diff --git a/modules/migration/m5-arango-password-migration.js b/modules/migration/m5-arango-password-migration.js new file mode 100644 index 000000000..57ad69927 --- /dev/null +++ b/modules/migration/m5-arango-password-migration.js @@ -0,0 +1,36 @@ +const Utilities = require('../Utilities'); +const { execSync } = require('child_process'); + +const fs = require('fs'); +const path = require('path'); + +/** + * Changes the arango password to a randomly generated one + */ +class M5ArangoPasswordMigration { + constructor({ + config, log, + }) { + this.config = config; + this.log = log; + } + + /** + * Run migration + */ + async run() { + try { + execSync('cp ./scripts/update-arango-password.sh ./'); + execSync('chmod +x update-arango-password.sh'); + execSync(`./update-arango-password.sh ${this.config.appDataPath} ${this.config.database.host} ${this.config.database.port}`, { stdio: 'inherit' }); + execSync('rm ./update-arango-password.sh'); + return 0; + } catch (error) { + this.log.error('Arango password update migration failed!'); + this.log.error(error); + return -1; + } + } +} + +module.exports = M5ArangoPasswordMigration; diff --git a/ot-node.js b/ot-node.js index 539a600ec..14781bf3a 100644 --- a/ot-node.js +++ b/ot-node.js @@ -44,6 +44,7 @@ const M1PayoutAllMigration = require('./modules/migration/m1-payout-all-migratio const M2SequelizeMetaMigration = require('./modules/migration/m2-sequelize-meta-migration'); const M3NetowrkIdentityMigration = require('./modules/migration/m3-network-identity-migration'); const M4ArangoMigration = require('./modules/migration/m4-arango-migration'); +const M5ArangoPasswordMigration = 
require('./modules/migration/m5-arango-password-migration'); const ImportWorkerController = require('./modules/worker/import-worker-controller'); const ImportService = require('./modules/service/import-service'); const OtJsonUtilities = require('./modules/OtJsonUtilities'); @@ -184,7 +185,6 @@ class OTNode { config.identity = ''; config.erc725Identity = ''; config.publicKeyData = {}; - Object.seal(config); const web3 = new Web3(new Web3.providers.HttpProvider(config.blockchain.rpc_server_url)); @@ -198,6 +198,24 @@ class OTNode { // check if ArangoDB service is running at all if (config.database.provider === 'arangodb') { try { + if (process.env.OT_NODE_DISTRIBUTION === 'docker' + && (''.localeCompare(config.database.password) === 0 + || 'root'.localeCompare(config.database.password) === 0)) { + await this._runArangoPasswordMigration(config); + } + + // get password for database + const databasePasswordFilePath = path + .join(config.appDataPath, config.database.password_file_name); + if (fs.existsSync(databasePasswordFilePath)) { + log.info('Using existing graph database password.'); + config.database.password = fs.readFileSync(databasePasswordFilePath).toString(); + } else { + log.notify('================================================================'); + log.notify(' Using default database password for access '); + log.notify('================================================================'); + } + const { version } = await Utilities.getArangoDbVersion(config); log.info(`Arango server version ${version} is up and running`); @@ -221,6 +239,8 @@ class OTNode { } } + Object.seal(config); + // Checking if selected graph database exists try { await Utilities.checkDoesStorageDbExists(config); @@ -411,6 +431,31 @@ class OTNode { } } + async _runArangoPasswordMigration(config) { + const migrationsStartedMills = Date.now(); + + const m5ArangoPasswordMigrationFilename = '5_m5ArangoPasswordMigrationFile'; + const migrationDir = path.join(config.appDataPath, 'migrations'); + const migrationFilePath = path.join(migrationDir, m5ArangoPasswordMigrationFilename); + if (!fs.existsSync(migrationFilePath)) { + const migration = new M5ArangoPasswordMigration({ log, config }); + try { + log.info('Initializing Arango password migration...'); + const result = await migration.run(); + if (result === 0) { + log.notify(`One-time password migration completed. Lasted ${Date.now() - migrationsStartedMills} millisecond(s)`); + await Utilities.writeContentsToFile(migrationDir, m5ArangoPasswordMigrationFilename, 'PROCESSED'); + } else { + log.error('One-time password migration failed. Defaulting to previous implementation'); + } + } catch (e) { + log.error(`Failed to run code migrations. Lasted ${Date.now() - migrationsStartedMills} millisecond(s). 
${e.message}`); + console.log(e); + process.exit(1); + } + } + } + /** * Run one time payout migration * @param blockchain diff --git a/scripts/backup.js b/scripts/backup.js index daf42cedc..c6d0059cb 100644 --- a/scripts/backup.js +++ b/scripts/backup.js @@ -49,7 +49,7 @@ const backupPath = argv.backup_directory.replace(/\/$/, ''); console.log('Setup path variables...'); -const files = ['identity.json', 'kademlia.crt', 'kademlia.key', 'houston.txt', 'system.db', 'erc725_identity.json', configName]; +const files = ['identity.json', 'kademlia.crt', 'kademlia.key', 'arango.txt', 'houston.txt', 'system.db', 'erc725_identity.json', configName]; const certs = ['fullchain.pem', 'privkey.pem']; let configFile; diff --git a/scripts/setup.js b/scripts/setup.js index 7a01a8add..ec2de5d7e 100644 --- a/scripts/setup.js +++ b/scripts/setup.js @@ -60,7 +60,6 @@ if (argv.configDir) { // Add arango DBs. arangoDbs.push(configjson.development.database); arangoDbs.push(configjson.testnet.database); - arangoDbs.push(configjson.mainnet.database); } else { configDirs.push(path.join( homedir, @@ -111,17 +110,31 @@ configDirs.forEach((configPath) => { async function resetArangoDb(database) { console.info(`Setting up graph database '${database.database}'...`); const systemDb = new Database(); + // + const databasePasswordFilePath = path.join(homedir, `.${pjson.name}rc`, database.password_file_name); + if (fs.existsSync(databasePasswordFilePath)) { + console.info('Using existing graph database password.'); + database.password = fs.readFileSync(databasePasswordFilePath).toString(); + } else { + console.info('================================================================'); + console.info(' Using default database password for access '); + console.info('================================================================'); + } + systemDb.useBasicAuth(database.username, database.password); - // Drop test database if exist. - const listOfDatabases = await systemDb.listDatabases(); + let listOfDatabases; + try { + listOfDatabases = await systemDb.listDatabases(); + } catch (e) { + systemDb.useBasicAuth(database.username, ''); + listOfDatabases = await systemDb.listDatabases(); + } if (listOfDatabases.includes(database.database)) { - await - systemDb.dropDatabase(database.database); + await systemDb.dropDatabase(database.database); } - await - systemDb.createDatabase( + await systemDb.createDatabase( database.database, [{ username: database.username, passwd: database.password, active: true }], ); diff --git a/scripts/update-arango-password.sh b/scripts/update-arango-password.sh new file mode 100755 index 000000000..6b6453465 --- /dev/null +++ b/scripts/update-arango-password.sh @@ -0,0 +1,58 @@ +#!/bin/bash +echo Running arango password update script... + +FOLDERDIR=$1 +echo Using ${FOLDERDIR} as node data folder + +touch ${FOLDERDIR}/arango.txt +new_arango_password=$(openssl rand -base64 32) +echo Generated new arango password! 
+ +echo -n $new_arango_password > ${FOLDERDIR}/arango.txt +echo New arango password stored in ${FOLDERDIR}/arango.txt file + +#cat ${FOLDERDIR}/arango.txt +#echo Generated new arango password: $new_arango_password + +touch arango-password-script.js + +echo 'try {' > arango-password-script.js +echo ' require("@arangodb/users").replace("root", ARGUMENTS[0]);'>> arango-password-script.js +echo ' print("SUCCESS");' >> arango-password-script.js +echo '} catch (error) {' >> arango-password-script.js +echo ' print("FAILURE");' >> arango-password-script.js +echo ' print(error);' >> arango-password-script.js +echo '}' >> arango-password-script.js + +echo Updating arango server password + +supervisorctl stop arango +sed -i 's/authentication = true/authentication = false/g' /etc/arangodb3/arangod.conf +supervisorctl start arango +sleep 10s + +status=$(/usr/bin/arangosh --server.password "" --javascript.execute arango-password-script.js ${new_arango_password}) + +supervisorctl stop arango +sed -i 's/authentication = false/authentication = true/g' /etc/arangodb3/arangod.conf +supervisorctl start arango +sleep 10 + +rm arango-password-script.js + +if [[ $status != "SUCCESS" ]]; +then + echo "Password update failed" + echo $status + mv ${FOLDERDIR}/arango.txt ${FOLDERDIR}/arango_failed.txt + exit 1 +fi + +echo "" +echo "===================================================" +echo "==== ====" +echo "==== Arango password successfully updated! ====" +echo "==== ====" +echo "===================================================" + + diff --git a/test/modules/graph.test.js b/test/modules/graph.test.js index b880047bd..5e0f1a937 100644 --- a/test/modules/graph.test.js +++ b/test/modules/graph.test.js @@ -18,7 +18,7 @@ describe('graph module ', () => { const config = rc(pjson.name, defaultConfig); Storage.models = (await models.sequelize.sync()).models; assert.hasAllKeys(config.database, ['provider', 'username', 'password', - 'host', 'port', 'database', 'max_path_length']); + 'password_file_name', 'host', 'port', 'database', 'max_path_length']); }); after('drop myDatabaseName db', async () => { diff --git a/test/modules/graphstorage.test.js b/test/modules/graphstorage.test.js index aee42f017..fbf485cc3 100644 --- a/test/modules/graphstorage.test.js +++ b/test/modules/graphstorage.test.js @@ -39,7 +39,7 @@ describe('GraphStorage module', () => { selectedDatabase.database = myDatabaseName; Storage.models = deasync(models.sequelize.sync()).models; assert.hasAllKeys(selectedDatabase, ['provider', 'username', 'password', - 'host', 'port', 'max_path_length', 'database']); + 'password_file_name', 'host', 'port', 'max_path_length', 'database']); selectedDatabase.database = myDatabaseName; if (selectedDatabase.provider === 'arangodb') { diff --git a/test/modules/utilities.test.js b/test/modules/utilities.test.js index 9085c3aff..3def334bd 100644 --- a/test/modules/utilities.test.js +++ b/test/modules/utilities.test.js @@ -32,7 +32,7 @@ describe('Utilities module', () => { `Some config items are missing in config for environment '${environment}'`, ); assert.hasAllKeys( - config.database, ['provider', 'username', 'password', 'database', 'port', 'host', 'max_path_length'], + config.database, ['provider', 'username', 'password', 'password_file_name', 'database', 'port', 'host', 'max_path_length'], `Some config items are missing in config.database for environment '${environment}'`, ); assert.hasAllKeys( @@ -155,7 +155,7 @@ describe('Utilities module', () => { environments.forEach((environment) => { const config = 
configJson[environment]; assert.hasAllKeys(config.database, ['provider', 'username', 'password', - 'host', 'port', 'database', 'max_path_length']); + 'host', 'port', 'password_file_name', 'database', 'max_path_length']); assert.equal(config.database.provider, 'arangodb'); }); }); diff --git a/testnet/install-arango.sh b/testnet/install-arango.sh index 3e3626560..945acd824 100644 --- a/testnet/install-arango.sh +++ b/testnet/install-arango.sh @@ -1,12 +1,21 @@ #!bin/bash curl -OL https://download.arangodb.com/arangodb35/DEBIAN/Release.key - apt-key add - < Release.key - echo 'deb https://download.arangodb.com/arangodb35/DEBIAN/ /' | tee /etc/apt/sources.list.d/arangodb.list - apt-get install apt-transport-https -y - apt-get update -y - echo arangodb3 arangodb3/password password root | debconf-set-selections - echo arangodb3 arangodb3/password_again password root | debconf-set-selections - echo arangodb3 arangodb3/upgrade boolean false | debconf-set-selections - echo arangodb3 arangodb3/storage_engine select auto | debconf-set-selections - apt-get install arangodb3=3.5.3-1 -y --allow-unauthenticated - sed -i 's/authentication = true/authentication = false/g' /etc/arangodb3/arangod.conf \ No newline at end of file +apt-key add - < Release.key + +echo 'deb https://download.arangodb.com/arangodb35/DEBIAN/ /' | tee /etc/apt/sources.list.d/arangodb.list +apt-get install apt-transport-https -y +apt-get update -y + +mkdir -p /ot-node/data +touch /ot-node/data/arango.txt +arango_password=$(openssl rand -base64 128) +echo $arango_password > /ot-node/data/arango.txt + +echo arangodb3 arangodb3/password password $arango_password | debconf-set-selections +echo arangodb3 arangodb3/password_again password $arango_password | debconf-set-selections +echo arangodb3 arangodb3/upgrade boolean false | debconf-set-selections +echo arangodb3 arangodb3/storage_engine select auto | debconf-set-selections +apt-get install arangodb3=3.5.3-1 -y --allow-unauthenticated +sed -i 's/authentication = true/authentication = false/g' /etc/arangodb3/arangod.conf + +arango_password="" \ No newline at end of file diff --git a/testnet/supervisord.conf b/testnet/supervisord.conf index 62332aae3..49bc94a28 100644 --- a/testnet/supervisord.conf +++ b/testnet/supervisord.conf @@ -4,6 +4,12 @@ directory=/ot-node/current logfile=/dev/null logfile_maxbytes=0 +[unix_http_server] +file=/var/run/supervisor.sock + +[rpcinterface:supervisor] +supervisor.rpcinterface_factory=supervisor.rpcinterface:make_main_rpcinterface + [program:otnode] command=bash -c "set -o pipefail; cd /ot-node/current; node --max-old-space-size=2048 /ot-node/current/testnet/register-node.js --configDir=/ot-node/data/ | tee -a complete-node.log" redirect_stderr=true From a42dc2a432a74f40981b37bf8b1007e88793d773 Mon Sep 17 00:00:00 2001 From: djordjekovac Date: Mon, 1 Jun 2020 09:26:39 +0200 Subject: [PATCH 3/9] Added update price factor script (#1267) --- scripts/update_price_factor.sh | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) create mode 100644 scripts/update_price_factor.sh diff --git a/scripts/update_price_factor.sh b/scripts/update_price_factor.sh new file mode 100644 index 000000000..425452399 --- /dev/null +++ b/scripts/update_price_factor.sh @@ -0,0 +1,19 @@ +#!/bin/bash +if [[ -z "$3" ]]; then + echo "No path provided, using default config path..." + path="$HOME/.origintrail_noderc" +else + path=$3 +fi + +if [[ ("$1" != "dc") && ("$1" != "dh")]]; then + echo "Invalid argument 1!" 
+else + if [[ -z $(grep "$1_price_factor" "$path") ]]; then + sed -i "/\"blockchain\": {/a \"$1_price_factor\" : \"$2\"," "$path" + else + sed -i "s/\(\"$1_price_factor\" : \)\"[0-9\.]*\"/\1\"$2\"/g" "$path" + fi + + docker restart otnode && docker logs otnode -f --tail 1000 +fi \ No newline at end of file From 370cf0b7c1eda048db0659e2fdd8ebea132eb78b Mon Sep 17 00:00:00 2001 From: djordjekovac Date: Wed, 3 Jun 2020 09:29:01 +0200 Subject: [PATCH 4/9] Resolved bug with unhandled timeout error (#1269) --- config/config.json | 2 +- modules/network/kademlia/kademlia.js | 24 ++++++++++++++++-------- modules/network/transport.js | 2 +- package.json | 2 +- 4 files changed, 19 insertions(+), 11 deletions(-) diff --git a/config/config.json b/config/config.json index b1457e7a0..c7e1300f1 100644 --- a/config/config.json +++ b/config/config.json @@ -224,7 +224,7 @@ "dc_choose_time": 300000, "requireApproval": false, "litigationEnabled": true, - "commandExecutorVerboseLoggingEnabled": true, + "commandExecutorVerboseLoggingEnabled": false, "reputationWindowInMinutes": 129600 }, "mainnet": { diff --git a/modules/network/kademlia/kademlia.js b/modules/network/kademlia/kademlia.js index 419a6451a..a83242be0 100644 --- a/modules/network/kademlia/kademlia.js +++ b/modules/network/kademlia/kademlia.js @@ -405,17 +405,14 @@ class Kademlia { if (err) { reject(Error(`Failed to find contact ${contactId}. ${err}`)); } - if (result && Array.isArray(result)) { - const contact = result[0]; - if (contact && Array.isArray(contact) && contact.length === 2 + const contact = this._getContactFromInterativeFindNodeResult(result); + if (contact && Array.isArray(contact) && contact.length === 2 && contact[1].hostname && contact[1].port && contact[0] === contact[1].identity) { - this.log.debug(`Found proxy contact in routing table. ${contact[0]} - ${contact[1].hostname}:${contact[1].port}`); - accept({ contact, header }); - } - reject(Error(`Unknown contact ${contactId}.`)); + this.log.debug(`Found proxy contact in routing table. ${contact[0]} - ${contact[1].hostname}:${contact[1].port}`); + accept({ contact, header }); } - reject(Error(`Failed to find contact ${contactId}. Not array: ${result}`)); + reject(Error(`Unknown contact ${contactId}.`)); }); }); }; @@ -845,6 +842,17 @@ class Kademlia { proof, })); } + + _getContactFromInterativeFindNodeResult(result) { + if (result && Array.isArray(result)) { + result.forEach((contact) => { + if (contact[0] !== this.config.identity) { + return contact; + } + }); + } + return null; + } } module.exports = Kademlia; diff --git a/modules/network/transport.js b/modules/network/transport.js index a6ec4e696..421983763 100644 --- a/modules/network/transport.js +++ b/modules/network/transport.js @@ -149,7 +149,7 @@ class Transport { halt(err); return; } - this.logger.debug(`Calling ${fn} operation failed at ${iteration} iteration. Contact ${contactId}, ${err}.\n${err.stack}`); + this.logger.debug(`Calling ${fn} operation failed at ${iteration} iteration. 
Contact ${contactId}, ${err}.}`); throw err; } }, opts); diff --git a/package.json b/package.json index dd786abd3..fa726b45a 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "origintrail_node", - "version": "4.1.2", + "version": "4.1.3", "description": "OriginTrail node", "main": ".eslintrc.js", "config": { From d11828dec1915be4ec841469d1fbcb73fac3cf68 Mon Sep 17 00:00:00 2001 From: djordjekovac Date: Thu, 4 Jun 2020 11:23:14 +0200 Subject: [PATCH 5/9] Added new parameters to node info route (#1270) * Added new parameters to node info route --- modules/controller/info-controller.js | 50 ++++++++++++++++++++++++ modules/service/rest-api-controller.js | 49 ++--------------------- modules/service/rest-api-v2.js | 54 ++------------------------ 3 files changed, 58 insertions(+), 95 deletions(-) create mode 100644 modules/controller/info-controller.js diff --git a/modules/controller/info-controller.js b/modules/controller/info-controller.js new file mode 100644 index 000000000..d6d6a12fe --- /dev/null +++ b/modules/controller/info-controller.js @@ -0,0 +1,50 @@ +const pjson = require('../../package.json'); + +class InfoController { + constructor(ctx) { + this.logger = ctx.logger; + this.transport = ctx.transport; + this.config = ctx.config; + this.graphStorage = ctx.graphStorage; + } + + async getNodeInfo(req, res) { + this.logger.api('GET: Node information request received.'); + + try { + const network = await this.transport.getNetworkInfo(); + + const basicConfig = { + version: pjson.version, + blockchain: this.config.blockchain.blockchain_title, + network, + is_bootstrap: this.config.is_bootstrap_node, + }; + + if (!this.config.is_bootstrap_node) { + const numberOfVertices = await this.graphStorage.getDocumentsCount('ot_vertices'); + const numberOfEdges = await this.graphStorage.getDocumentsCount('ot_edges'); + Object.assign(basicConfig, { + node_wallet: this.config.node_wallet, + erc_725_identity: this.config.erc725Identity, + graph_size: { + number_of_vertices: numberOfVertices, + number_of_edges: numberOfEdges, + }, + }); + } + + res.status(200); + res.send(basicConfig); + } catch (error) { + this.logger.error(`Failed to process /api/info route. ${error}`); + res.status(500); + res.send({ + message: error, + }); + } + } +} + +module.exports = InfoController; + diff --git a/modules/service/rest-api-controller.js b/modules/service/rest-api-controller.js index 486e3ce6e..01e5a9d9c 100644 --- a/modules/service/rest-api-controller.js +++ b/modules/service/rest-api-controller.js @@ -15,7 +15,7 @@ class RestApiController { this.logger = ctx.logger; this.apiUtilities = ctx.apiUtilities; this.emitter = ctx.emitter; - + this.infoController = ctx.infoController; this.version_id = 'controller'; this.restApis = [new RestApiV2(ctx, true)]; @@ -140,7 +140,9 @@ class RestApiController { } _exposeBootstrapAPIRoutes(server) { - this._registerNodeInfoRoute(server, true); + server.get('/api/info', async (req, res) => { + await this.infoController.getNodeInfo(req, res); + }); } _exposeAPIRoutes(server) { @@ -157,49 +159,6 @@ class RestApiController { }); }); } - - /** - * Register common info route - * @param server - Server instance - * @param isBootstrap - Is this a bootstrap node? 
- * @private - */ - _registerNodeInfoRoute(server, isBootstrap) { - const { - transport, - config, - } = this.ctx; - - server.get('/api/info', async (req, res) => { - this.logger.api('GET: Node information request received.'); - - try { - const network = await transport.getNetworkInfo(); - const basicConfig = { - version: pjson.version, - blockchain: config.blockchain.blockchain_title, - network, - is_bootstrap: isBootstrap, - }; - - if (!isBootstrap) { - Object.assign(basicConfig, { - node_wallet: config.node_wallet, - erc_725_identity: config.erc725Identity, - }); - } - - res.status(200); - res.send(basicConfig); - } catch (error) { - this.logger.error(`Failed to process /api/info route. ${error}`); - res.status(500); - res.send({ - message: error, - }); - } - }); - } } module.exports = RestApiController; diff --git a/modules/service/rest-api-v2.js b/modules/service/rest-api-v2.js index 9ac88bcff..4e30edb54 100644 --- a/modules/service/rest-api-v2.js +++ b/modules/service/rest-api-v2.js @@ -22,6 +22,7 @@ class RestAPIServiceV2 { this.dcController = ctx.dcController; this.dhController = ctx.dhController; this.dvController = ctx.dvController; + this.infoController = ctx.infoController; this.exportController = ctx.exportController; this.remoteControl = ctx.remoteControl; @@ -53,14 +54,10 @@ class RestAPIServiceV2 { transport, emitter, blockchain, web3, config, } = this.ctx; - this._registerNodeInfoRoute(server, false); + server.get(`/api/${this.version_id}/info`, async (req, res) => { + await this.infoController.getNodeInfo(req, res); + }); - /** - * Data import route - * @param file - file or text data - * @param standard_id - ID of file standard - * (supported standards are listed in this.standards array) - */ server.post(`/api/${this.version_id}/import`, async (req, res) => { await this._importDataset(req, res); }); @@ -390,49 +387,6 @@ class RestAPIServiceV2 { }); } - /** - * Register common info route - * @param server - Server instance - * @param isBootstrap - Is this a bootstrap node? - * @private - */ - _registerNodeInfoRoute(server, isBootstrap) { - const { - transport, - config, - } = this.ctx; - - server.get(`/api/${this.version_id}/info`, async (req, res) => { - this.logger.api('GET: Node information request received.'); - - try { - const network = await transport.getNetworkInfo(); - const basicConfig = { - version: pjson.version, - blockchain: config.blockchain.blockchain_title, - network, - is_bootstrap: isBootstrap, - }; - - if (!isBootstrap) { - Object.assign(basicConfig, { - node_wallet: config.node_wallet, - erc_725_identity: config.erc725Identity, - }); - } - - res.status(200); - res.send(basicConfig); - } catch (error) { - this.logger.error(`Failed to process /api/info route. 
${error}`); - res.status(500); - res.send({ - message: error, - }); - } - }); - } - async _getTrail(req, res) { this.logger.api('POST: Trail request received.'); From 97c379a66ac072e5f0c5241de62d6899a0b7496b Mon Sep 17 00:00:00 2001 From: djordjekovac Date: Fri, 5 Jun 2020 12:16:26 +0200 Subject: [PATCH 6/9] Resolved bug with contact fetching from itterative find result (#1272) --- modules/network/kademlia/kademlia.js | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/modules/network/kademlia/kademlia.js b/modules/network/kademlia/kademlia.js index a83242be0..05072a49c 100644 --- a/modules/network/kademlia/kademlia.js +++ b/modules/network/kademlia/kademlia.js @@ -844,12 +844,10 @@ class Kademlia { } _getContactFromInterativeFindNodeResult(result) { - if (result && Array.isArray(result)) { - result.forEach((contact) => { - if (contact[0] !== this.config.identity) { - return contact; - } - }); + for (const contact of result) { + if (contact[0] !== this.config.identity) { + return contact; + } } return null; } From 61861abd100fa31a5a7937ce0a9ac6db64f531c8 Mon Sep 17 00:00:00 2001 From: Djordje Kovacevic Date: Fri, 5 Jun 2020 15:46:01 +0200 Subject: [PATCH 7/9] Resolved issue with too many infura calls --- modules/command/dh/dh-offer-finalized-command.js | 6 +++--- modules/service/dh-service.js | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/modules/command/dh/dh-offer-finalized-command.js b/modules/command/dh/dh-offer-finalized-command.js index a8d745bfe..1cd7e69c6 100644 --- a/modules/command/dh/dh-offer-finalized-command.js +++ b/modules/command/dh/dh-offer-finalized-command.js @@ -53,7 +53,7 @@ class DhOfferFinalizedCommand extends Command { await bid.save({ fields: ['status'] }); this.logger.important(`I've been chosen for offer ${offerId}.`); - await this.remoteControl.onCompletedBids(); + // await this.remoteControl.onCompletedBids(); if (this.config.disableAutoPayouts !== true) { const scheduledTime = @@ -79,7 +79,7 @@ class DhOfferFinalizedCommand extends Command { bid.status = 'NOT_CHOSEN'; await bid.save({ fields: ['status'] }); this.logger.important(`I haven't been chosen for offer ${offerId}.`); - await this.remoteControl.onCompletedBids(); + // await this.remoteControl.onCompletedBids(); return Command.empty(); } } @@ -97,7 +97,7 @@ class DhOfferFinalizedCommand extends Command { const bid = await Models.bids.findOne({ where: { offer_id: offerId } }); bid.status = 'NOT_CHOSEN'; await bid.save({ fields: ['status'] }); - await this.remoteControl.onCompletedBids(); + // await this.remoteControl.onCompletedBids(); return Command.empty(); } diff --git a/modules/service/dh-service.js b/modules/service/dh-service.js index cee5dea49..37240f1d4 100644 --- a/modules/service/dh-service.js +++ b/modules/service/dh-service.js @@ -170,7 +170,7 @@ class DHService { transactional: false, }); - await this.remoteControl.getPendingBids(); + // await this.remoteControl.getPendingBids(); } /** From a2c9d2e2123a2cd977629cd6f09aef553100d96d Mon Sep 17 00:00:00 2001 From: Milos Kotlar Date: Mon, 8 Jun 2020 13:25:36 +0200 Subject: [PATCH 8/9] update backup and restore script --- scripts/backup.js | 16 ++++++++++++++-- scripts/restore.js | 15 +++++++++++++-- 2 files changed, 27 insertions(+), 4 deletions(-) diff --git a/scripts/backup.js b/scripts/backup.js index c6d0059cb..98abe75e7 100644 --- a/scripts/backup.js +++ b/scripts/backup.js @@ -2,6 +2,7 @@ const mkdirp = require('mkdirp'); const fs = require('fs'); const argv = 
require('minimist')(process.argv.slice(2)); const { exec } = require('child_process'); +const path = require('path'); require('dotenv').config(); let environment; @@ -84,6 +85,8 @@ try { if (fs.existsSync(src)) { console.log(`Backup: ${src} -> ${dest}`); fs.copyFileSync(src, dest, (err) => { if (err) { console.error(err); return 1; } }); + } else if (file === 'arango.txt') { + console.log(`Could not find unnecessary backup file ${src}.`); } else { throw Error(`Could not find necessary backup file ${src}, aborting!`); } @@ -127,8 +130,17 @@ try { if (!configFile.database.username) { configFile.database.username = defaultConfig.database.username; } - if (configFile.database.password === undefined) { - configFile.database.password = defaultConfig.database.password; + if (configFile.database.password_file_name) { + // eslint-disable-next-line max-len + const databasePasswordFilePath = path.join(configDirectory, configFile.database.password_file_name); + if (fs.existsSync(databasePasswordFilePath)) { + console.log('Using existing graph database password.'); + configFile.database.password = fs.readFileSync(databasePasswordFilePath).toString(); + } else { + console.log('================================================================'); + console.log(' Using default database password for access '); + console.log('================================================================'); + } } let databaseName; diff --git a/scripts/restore.js b/scripts/restore.js index 7c6d71203..a255c7a86 100644 --- a/scripts/restore.js +++ b/scripts/restore.js @@ -2,6 +2,7 @@ const mkdirp = require('mkdirp'); const fs = require('fs'); const argv = require('minimist')(process.argv.slice(2)); const { exec } = require('child_process'); +const path = require('path'); require('dotenv').config(); if (!process.env.NODE_ENV) { @@ -109,8 +110,18 @@ if (!configFile.database.provider) { if (!configFile.database.username) { configFile.database.username = defaultConfig.database.username; } -if (configFile.database.password === undefined) { - configFile.database.password = defaultConfig.database.password; + +if (configFile.database.password_file_name) { +// eslint-disable-next-line max-len + const databasePasswordFilePath = path.join(configDirectory, configFile.database.password_file_name); + if (fs.existsSync(databasePasswordFilePath)) { + console.log('Using existing graph database password.'); + configFile.database.password = fs.readFileSync(databasePasswordFilePath).toString(); + } else { + console.log('================================================================'); + console.log(' Using default database password for access '); + console.log('================================================================'); + } } switch (configFile.database.provider) { From 0fbd639170eb538d25c2bf7e79cc3a98ae470f3f Mon Sep 17 00:00:00 2001 From: Milos Kotlar Date: Mon, 8 Jun 2020 15:21:21 +0200 Subject: [PATCH 9/9] add better logging --- scripts/backup.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/backup.js b/scripts/backup.js index 98abe75e7..d894dcb7a 100644 --- a/scripts/backup.js +++ b/scripts/backup.js @@ -86,7 +86,7 @@ try { console.log(`Backup: ${src} -> ${dest}`); fs.copyFileSync(src, dest, (err) => { if (err) { console.error(err); return 1; } }); } else if (file === 'arango.txt') { - console.log(`Could not find unnecessary backup file ${src}.`); + console.log(`Could not find backup file ${src}.`); } else { throw Error(`Could not find necessary backup file ${src}, aborting!`); }
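
Across patches 2 and 8 the same graph-database-password fallback is repeated in ot-node.js, scripts/setup.js, scripts/backup.js and scripts/restore.js: if the file named by database.password_file_name exists in the node's data/config directory, its contents replace the configured password; otherwise the default password from config.json is kept and a warning banner is printed. Below is a minimal standalone sketch of that shared pattern; the helper name readArangoPassword and its module wrapper are illustrative assumptions, not code taken from the patch series.

    const fs = require('fs');
    const path = require('path');

    // Illustrative helper (not part of the patches above); it mirrors the
    // fallback the patches add to ot-node.js, setup.js, backup.js and restore.js.
    function readArangoPassword(configDir, database) {
        // password_file_name defaults to 'arango.txt' in config/config.json.
        if (!database.password_file_name) {
            return database.password;
        }
        const passwordFilePath = path.join(configDir, database.password_file_name);
        if (fs.existsSync(passwordFilePath)) {
            // The file is written by scripts/update-arango-password.sh (docker
            // one-time migration) or testnet/install-arango.sh and contains
            // only the generated password.
            return fs.readFileSync(passwordFilePath).toString();
        }
        // No password file yet: keep the configured (default) password.
        return database.password;
    }

    module.exports = { readArangoPassword };

Called as readArangoPassword(config.appDataPath, config.database), this corresponds to the point in ot-node.js where the patches apply the same logic, just before Object.seal(config) is invoked.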