From 08d22c32ae1cb1793b9642dfba145b7ae75e20d9 Mon Sep 17 00:00:00 2001
From: Carmen Fan
Date: Mon, 11 Dec 2023 15:41:22 +0000
Subject: [PATCH] ISSUE #426 remove ability to generate toy project - which is the only feature that uses mongodb lib

---
 tools/bouncer_worker/package.json             |   1 -
 tools/bouncer_worker/src/lib/config.js        |   3 -
 .../bouncer_worker/src/lib/messageDecoder.js  |  11 -
 .../src/queues/jobQueueHandler.js             |  47 +--
 .../bouncer_worker/src/scripts/rename_toy.js  |  45 ---
 tools/bouncer_worker/src/tasks/importToy.js   | 291 ------------------
 .../bouncer_worker/src/toyUtils/exportToy.py  |  91 ------
 .../src/toyUtils/renameFiles.py               |  34 --
 8 files changed, 7 insertions(+), 516 deletions(-)
 delete mode 100644 tools/bouncer_worker/src/scripts/rename_toy.js
 delete mode 100644 tools/bouncer_worker/src/tasks/importToy.js
 delete mode 100644 tools/bouncer_worker/src/toyUtils/exportToy.py
 delete mode 100644 tools/bouncer_worker/src/toyUtils/renameFiles.py

diff --git a/tools/bouncer_worker/package.json b/tools/bouncer_worker/package.json
index 5fa3e2aa6..8fd0c16b2 100644
--- a/tools/bouncer_worker/package.json
+++ b/tools/bouncer_worker/package.json
@@ -20,7 +20,6 @@
   "crypto-js": "4.1.1",
   "elastic-apm-node": "3.41.1",
   "moment": "2.29.1",
-  "mongodb": "3.6.6",
   "systeminformation": "5.6.20",
   "tree-kill": "1.2.2",
   "uuid": "8.3.2",
diff --git a/tools/bouncer_worker/src/lib/config.js b/tools/bouncer_worker/src/lib/config.js
index 332e414ca..d1fed239a 100755
--- a/tools/bouncer_worker/src/lib/config.js
+++ b/tools/bouncer_worker/src/lib/config.js
@@ -36,9 +36,6 @@ const applyDefaultValuesIfUndefined = (config) => {
 	config.rabbitmq.maxWaitTimeMS = config.rabbitmq.maxWaitTimeMS || 5 * 60 * 1000;
 	config.rabbitmq.waitBeforeShutdownMS = config.rabbitmq.waitBeforeShutdownMS || 60000;

-	// toy project configurations
-	config.toyModelDir = config.toyModelDir || path.resolve(__dirname, '../../toy');
-
 	// logging related
 	config.logging = config.logging || {};
 	config.logging.taskLogDir = config.logging.taskLogDir || config.bouncer.log_dir || config.rabbitmq.sharedDir;
diff --git a/tools/bouncer_worker/src/lib/messageDecoder.js b/tools/bouncer_worker/src/lib/messageDecoder.js
index e1582a3ed..0d46fe09d 100644
--- a/tools/bouncer_worker/src/lib/messageDecoder.js
+++ b/tools/bouncer_worker/src/lib/messageDecoder.js
@@ -41,16 +41,6 @@ const messageDecoder = (cmd) => {
 	const args = replaceSharedDirPlaceHolder(cmd).split(/\s+/);
 	res = { command: args[0] };
 	switch (args[0]) {
-		case 'importToy':
-			res = {
-				command: args[0],
-				database: args[1],
-				model: args[2],
-				toyModelID: args[3],
-				user: args[1],
-				skipPostProcessing: (args[4] && JSON.parse(args[4])) || {},
-			};
-			break;
 		case 'import':
 			{
 				// eslint-disable-next-line
@@ -73,7 +63,6 @@
 			cmdParams: [configPath, ...args],
 			database: cmdFile.database,
 			model: cmdFile.project,
-			toyFed: cmdFile.toyFed,
 			user: cmdFile.owner,
 			...res,
 		};
diff --git a/tools/bouncer_worker/src/queues/jobQueueHandler.js b/tools/bouncer_worker/src/queues/jobQueueHandler.js
index feee84857..1e1977c95 100644
--- a/tools/bouncer_worker/src/queues/jobQueueHandler.js
+++ b/tools/bouncer_worker/src/queues/jobQueueHandler.js
@@ -20,10 +20,9 @@
 const { jobQueueSpecified, logDirExists, sharedDirExists } = require('./common');
-const { importToyModel, validateToyImporterSettings } = require('../tasks/importToy');
-const { ERRCODE_OK, ERRCODE_TOY_IMPORT_FAILED } = require('../constants/errorCodes');
+const { ERRCODE_OK } = require('../constants/errorCodes');
 const { config } = require('../lib/config');
-const { generateTreeStash, runBouncerCommand } = require('../tasks/bouncerClient');
+const { runBouncerCommand } = require('../tasks/bouncerClient');
 const { messageDecoder } = require('../lib/messageDecoder');
 const logger = require('../lib/logger');

@@ -31,30 +30,7 @@ const Handler = {};

 const logLabel = { label: 'JOBQ' };

-const importToy = async ({ database, model, toyModelID, skipPostProcessing }, logDir) => {
-	const returnMessage = {
-		value: ERRCODE_OK,
-		database,
-		project: model,
-	};
-
-	try {
-		await importToyModel(toyModelID, database, model, skipPostProcessing);
-
-		if (!skipPostProcessing.tree) {
-			logger.info('Toy model imported. Generating tree...', logLabel);
-			await generateTreeStash(logDir, database, model, 'tree');
-		}
-	} catch (err) {
-		logger.error(`importToy module error: ${err.message || err}`, logLabel);
-		returnMessage.value = ERRCODE_TOY_IMPORT_FAILED;
-		returnMessage.message = err.message || err;
-	}
-
-	return returnMessage;
-};
-
-const createFed = async ({ database, model, toyFed, cmdParams }, logDir) => {
+const createFed = async ({ database, model, cmdParams }, logDir) => {
 	const returnMessage = {
 		value: ERRCODE_OK,
 		database,
@@ -62,12 +38,9 @@
 	};
 	try {
 		returnMessage.value = await runBouncerCommand(logDir, cmdParams);
-		if (toyFed) {
-			await importToyModel(toyFed, database, model, { tree: 1 });
-		}
 	} catch (err) {
 		logger.error(`Error generating federation: ${err.message || err}`, logLabel);
-		returnMessage.value = toyFed ? ERRCODE_TOY_IMPORT_FAILED : err;
+		returnMessage.value = err;
 		returnMessage.message = err.message || err;
 	}

@@ -83,19 +56,13 @@ Handler.onMessageReceived = async (cmd, rid, callback) => {
 		return;
 	}

-	if (cmdMsg.command === 'importToy') {
-		const message = await importToy(cmdMsg, logDir);
-		callback(JSON.stringify(message));
-	} else {
-		const message = await createFed(cmdMsg, logDir);
-		callback(JSON.stringify(message));
-	}
+	const message = await createFed(cmdMsg, logDir);
+	callback(JSON.stringify(message));
 };

 Handler.validateConfiguration = (label) => logDirExists(label)
 	&& jobQueueSpecified(label)
 	&& sharedDirExists(label)
-	&& callbackQueueSpecified(label)
-	&& validateToyImporterSettings();
+	&& callbackQueueSpecified(label);

 module.exports = Handler;
diff --git a/tools/bouncer_worker/src/scripts/rename_toy.js b/tools/bouncer_worker/src/scripts/rename_toy.js
deleted file mode 100644
index 32917a4d6..000000000
--- a/tools/bouncer_worker/src/scripts/rename_toy.js
+++ /dev/null
@@ -1,45 +0,0 @@
-/**
- * Copyright (C) 2020 3D Repo Ltd
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Affero General Public License as
- * published by the Free Software Foundation, either version 3 of the
- * License, or (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Affero General Public License for more details.
- *
- * You should have received a copy of the GNU Affero General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-const yargs = require('yargs/yargs');
-const { hideBin } = require('yargs/helpers');
-const { exitApplication } = require('../lib/utils');
-const logger = require('../lib/logger');
-const { runRenameScripts } = require('../tasks/importToy');
-
-const parseParameters = () => {
-	const args = yargs(hideBin(process.argv));
-	return args.option('teamspace', {
-		describe: 'teamspace to run this script on',
-		string: true,
-		demandOption: true,
-	}).option('modelId', {
-		describe: 'modelId to run this script on',
-		string: true,
-		demandOption: true,
-	}).help().argv;
-};
-
-const runRename = () => {
-	const { teamspace, modelId } = parseParameters();
-	logger.info(`Run renaming script on ${teamspace} ${modelId}`);
-
-	return runRenameScripts(teamspace, modelId);
-};
-
-runRename().then(() => exitApplication(0))
-	.catch((err) => { logger.error(err.message || err); exitApplication(err.code); });
diff --git a/tools/bouncer_worker/src/tasks/importToy.js b/tools/bouncer_worker/src/tasks/importToy.js
deleted file mode 100644
index f0f37581a..000000000
--- a/tools/bouncer_worker/src/tasks/importToy.js
+++ /dev/null
@@ -1,291 +0,0 @@
-/**
- * Copyright (C) 2017 3D Repo Ltd
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Affero General Public License as
- * published by the Free Software Foundation, either version 3 of the
- * License, or (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Affero General Public License for more details.
- *
- * You should have received a copy of the GNU Affero General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-const { MongoClient, GridFSBucket } = require('mongodb');
-const fs = require('fs');
-const { config } = require('../lib/config');
-const runCommand = require('../lib/runCommand');
-const logger = require('../lib/logger');
-
-const logLabel = { label: 'TOY' };
-
-const accumulateCollectionFiles = (modelDir, modelId) => {
-	const importCollectionFiles = {};
-
-	fs.readdirSync(modelDir).forEach((file) => {
-		const collectionName = file.replace(/\.json$/, '');
-		importCollectionFiles[`${modelId}.${collectionName}`] = file;
-	});
-
-	return importCollectionFiles;
-};
-
-const runMongoImport = async (database, collection, filePath) => {
-	const params = [
-		'-j', '8',
-		'--uri', config.db.connectionString,
-		'--username', config.db.username,
-		'--password', config.db.password,
-		'--authenticationDatabase', 'admin',
-		'--db', database,
-		'--collection', collection,
-		'--file', filePath,
-	];
-
-	try {
-		await runCommand('mongoimport', params, { verbose: false });
-	} catch (errCode) {
-		logger.error(`Failed to run mongoimport on ${database}:${collection} with data from ${filePath}`, logLabel);
-		throw errCode;
-	}
-};
-
-const importJSON = async (modelDir, database, modelId) => {
-	const collectionFiles = accumulateCollectionFiles(modelDir, modelId);
-
-	const promises = [];
-	Object.keys(collectionFiles).forEach((collection) => {
-		const files = collectionFiles[collection];
-		const filePath = `${modelDir}/${files}`;
-		promises.push(runMongoImport(database, collection, filePath));
-	});
-
-	return Promise.all(promises);
-};
-
-const updateHistoryAuthorAndDate = (db, database, modelId) => {
-	const collection = db.collection(`${modelId}.history`);
-	const update = {
-		$set: {
-			author: database,
-		},
-	};
-	return collection.updateMany({}, update);
-};
-
-const updateAuthorAndDate = async (db, database, model, ext) => {
-	const collection = db.collection(`${model}.${ext}`);
-	const promises = [];
-
-	const issues = await collection.find().toArray();
-	issues.forEach((issue) => {
-		const updatedIssue = issue;
-		updatedIssue.owner = database;
-		const comments = [];
-		if (updatedIssue.comments) {
-			updatedIssue.comments.forEach((comment) => {
-				comments.push({ ...comment, owner: database });
-			});
-		}
-		updatedIssue.comments = comments;
-		promises.push(collection.replaceOne({ _id: updatedIssue._id }, updatedIssue));
-	});
-
-	await Promise.all(promises);
-};
-
-const renameUnityAssetList = (db, database, model) => {
-	const collection = db.collection(`${model}.stash.unity3d`);
-	const promises = [];
-	const prefix = `/${database}/${model}/`;
-	collection.find().forEach((asset) => {
-		const entry = asset;
-		entry.database = database;
-		entry.model = model;
-		for (let i = 0; i < entry.jsonFiles.length; ++i) {
-			const oldDir = entry.jsonFiles[i];
-			const dirArr = oldDir.split('/');
-			entry.jsonFiles[i] = prefix + dirArr[dirArr.length - 1];
-		}
-
-		for (let i = 0; i < entry.assets.length; ++i) {
-			const oldDir = entry.assets[i];
-			const dirArr = oldDir.split('/');
-			entry.assets[i] = prefix + dirArr[dirArr.length - 1];
-		}
-
-		promises.push(collection.replaceOne({ _id: entry._id }, entry));
-	});
-	return Promise.all(promises);
-};
-
-const renameUnityAsset = (bucket, database, modelId, file) => new Promise((resolve, reject) => {
-	const rs = bucket.openDownloadStream(file._id);
-	const bufs = [];
-
-	function finishDownload() {
-		const unityAsset = JSON.parse(Buffer.concat(bufs));
-
-		unityAsset.assets = unityAsset.assets || [];
-		unityAsset.jsonFiles = unityAsset.jsonFiles || [];
-
-		[unityAsset.assets, unityAsset.jsonFiles].forEach((arr) => {
-			arr.forEach((assetFile, i, assetFiles) => {
-				const newFileName = assetFile.split('/');
-				newFileName[1] = database;
-				newFileName[2] = modelId;
-				// eslint-disable-next-line no-param-reassign
-				assetFiles[i] = newFileName.join('/');
-			});
-		});
-
-		unityAsset.database = database;
-		unityAsset.model = modelId;
-
-		// drop the old one
-		bucket.delete(file._id, (err) => {
-			if (err) {
-				reject(err);
-			}
-
-			// write the updated unityasset json back to database
-			const ws = bucket.openUploadStreamWithId(file._id, file.filename);
-
-			ws.end(JSON.stringify(unityAsset), 'utf8', (error) => {
-				if (error) {
-					reject(error);
-				} else {
-					resolve();
-				}
-			});
-		});
-	}
-
-	rs.on('data', (d) => bufs.push(d));
-	rs.on('end', () => finishDownload());
-	rs.on('error', (err) => reject(err));
-});
-
-const renameStash = (db, database, modelId, bucketName) => {
-	const bucket = new GridFSBucket(db, { bucketName });
-	const renamePromises = [];
-
-	const files = bucket.find();
-	for (let i = 0; i < files.length; ++i) {
-		const file = files[i];
-		let newFileName = file.filename.split('/');
-		if (newFileName.length >= 3) {
-			newFileName[1] = database;
-			newFileName[2] = modelId;
-			newFileName = newFileName.join('/');
-			file.filename = newFileName;
-
-			renamePromises.push(bucket.rename(file._id, newFileName));
-
-			// unityAssets.json have the path baked into the file :(
-			if (newFileName.endsWith('unityAssets.json')) {
-				renamePromises.push(renameUnityAsset(bucket, database, modelId, file));
-			}
-		}
-	}
-
-	return Promise.all(renamePromises);
-};
-
-const renameGroups = async (db, database, modelId) => {
-	const subModelNameToOldID = {
-		Lego_House_Architecture: '9f101b80-b4c6-11ec-8b15-4f0e6dbe2114',
-		Lego_House_Landscape: '9f117b10-b4c6-11ec-8b15-4f0e6dbe2114',
-		Lego_House_Structure: '9f11f040-b4c6-11ec-8b15-4f0e6dbe2114',
-	};
-
-	const collection = db.collection(`${modelId}.groups`);
-
-	const setting = await db.collection('settings').findOne({ _id: modelId });
-
-	if (!setting) {
-		throw `Model ${setting} not found`;
-	}
-
-	const oldIdToNewId = {};
-	const submodels = await db.collection('settings').find({ type: 'sample' }).toArray();
-	submodels.forEach((subModelSetting) => {
-		if (subModelNameToOldID[subModelSetting.name]) {
-			oldIdToNewId[subModelNameToOldID[subModelSetting.name]] = subModelSetting._id;
-		}
-	});
-
-	const groups = await collection.find().toArray();
-	const updateObjectPromises = [];
-
-	groups.forEach((group) => {
-		const grp = group;
-		grp.author = database;
-		if (group.objects) {
-			group.objects.forEach((object) => {
-				const obj = object;
-				obj.account = database;
-
-				// if model is fed model, then model id of a group should be
-				// one of the sub models instead of the id of the fed model itself
-				obj.model = oldIdToNewId[obj.model] || modelId;
-			});
-		}
-		updateObjectPromises.push(collection.replaceOne({ _id: group._id }, group));
-	});
-
-	await Promise.all(updateObjectPromises);
-};
-
-const getURLWithAuth = () => {
-	const protocol = 'mongodb://';
-	const authStr = `${config.db.username}:${encodeURIComponent(config.db.password)}@`;
-	return config.db.connectionString.replace(protocol, `${protocol}${authStr}`);
-};
-
-const renameData = async (database, modelId) => {
-	const dbConn = await MongoClient.connect(getURLWithAuth(), { useUnifiedTopology: true });
-	const db = dbConn.db(database);
-	const promises = [];
-
-	promises.push(renameStash(db, database, modelId, `${modelId}.stash.json_mpc`));
-	promises.push(renameStash(db, database, modelId, `${modelId}.stash.src`));
-	promises.push(renameStash(db, database, modelId, `${modelId}.stash.unity3d`));
-	promises.push(renameUnityAssetList(db, database, modelId));
-
-	promises.push(updateHistoryAuthorAndDate(db, database, modelId));
-	promises.push(updateAuthorAndDate(db, database, modelId, 'issues'));
-	promises.push(updateAuthorAndDate(db, database, modelId, 'risks'));
-	promises.push(renameGroups(db, database, modelId));
-
-	await Promise.all(promises);
-
-	dbConn.close();
-};
-
-const ToyImporter = {};
-
-ToyImporter.importToyModel = async (toyModelID, database, modelId) => {
-	const modelDir = `${config.toyModelDir}/${toyModelID}`;
-	await importJSON(modelDir, database, modelId);
-
-	await renameData(database, modelId);
-	logger.info(`${toyModelID} imported to ${modelId}`, logLabel);
-};
-
-ToyImporter.runRenameScripts = renameData;
-
-ToyImporter.validateToyImporterSettings = () => {
-	if (!config.toyModelDir) {
-		logger.error('toyModelDir not specified', logLabel);
-		return false;
-	}
-
-	return true;
-};
-
-module.exports = ToyImporter;
diff --git a/tools/bouncer_worker/src/toyUtils/exportToy.py b/tools/bouncer_worker/src/toyUtils/exportToy.py
deleted file mode 100644
index 0f130ef53..000000000
--- a/tools/bouncer_worker/src/toyUtils/exportToy.py
+++ /dev/null
@@ -1,91 +0,0 @@
-#usage: exportToy.py
-
-import sys
-import os
-from pymongo import MongoClient
-import shutil
-import re
-
-
-if len(sys.argv) < 9:
-	print "Error: Not enough parameters."
-	print "Usage: "+ sys.argv[0] +" "
-	sys.exit(1)
-
-dbAdd = sys.argv[1]
-dbPort = sys.argv[2]
-dbUsername = sys.argv[3]
-dbPassword = sys.argv[4]
-fileShareDir = sys.argv[5]
-dbName = sys.argv[6]
-fileToModelIDs = sys.argv[7]
-fedID = sys.argv[8]
-
-print "db add: " + dbAdd
-print "db port: " + dbPort
-print "db username: " + dbUsername
-print "db pw: " + dbPassword
-print "fileshare location: " + fileShareDir
-print "db Name: " + dbName
-print "model file: " + fileToModelIDs
-print "fedId: " + fedID
-
-from datetime import datetime
-
-#Find out the model IDs by reading the model ID file
-fp = open(fileToModelIDs, "r")
-modelList = fp.readlines()
-fp.close()
-
-connString = "mongodb://"+ dbUsername + ":" + dbPassword +"@"+dbAdd + ":" + dbPort + "/"
-db = MongoClient(connString)[dbName]
-toyFolder = "toy_" + datetime.today().strftime('%Y-%m-%d');
-toyFolderFullDir = os.path.join(fileShareDir , toyFolder);
-if not os.path.exists(toyFolderFullDir):
-	os.makedirs(toyFolderFullDir)
-
-def grabExternalFilesAndRewrite(collection):
-	print "Copying files from file share to toy project folder : " + collection
-	for entry in db[collection].find({"type": "fs"}):
-		pathToFile = os.path.join(fileShareDir, entry["link"])
-		try:
-			shutil.copy(pathToFile, toyFolderFullDir)
-		except:
-			print "copy failed: " + pathToFile + ","+ toyFolderFullDir
-		newPath = re.sub(r'.+/.+/', toyFolder + "/", entry["link"])
-		newPath = re.sub(r'toy_.+/', toyFolder + "/", newPath)
-		entry["link"] = newPath;
-		entry["noDelete"] = True;
-		db[collection].save(entry);
-	return
-
-def findModelCols(model):
-	res = []
-	for collection in db.list_collection_names():
-		if collection.startswith(model):
-			res.append(collection)
-	return res
-
-
-for model in modelList:
-	model = model.replace('\n', '')
-	modelDirectory = "toy/" + model
-	if not os.path.exists(modelDirectory):
-		os.makedirs(modelDirectory)
-	for col in findModelCols(model):
-		if col.endswith(".ref"):
-			grabExternalFilesAndRewrite(col)
-
-		cmd = "mongoexport /host:" + dbAdd + " /port:" + dbPort + " /username:" + dbUsername + " /password:" + dbPassword + " /authenticationDatabase:admin /db:" + dbName + " /collection:" + col + " /out:" + modelDirectory + "/" + col.replace(model +".", "", 1) + ".json";
-		os.system(cmd)
-
-#export groups, issues, risks, and views from federation
-colsInFed = ["groups", "issues", "risks", "views"]
-
-modelDirectory = "toy/" + fedID
-if not os.path.exists(modelDirectory):
-	os.makedirs(modelDirectory)
-for ext in colsInFed:
-	cmd = "mongoexport /host:" + dbAdd + " /port:" + dbPort + " /username:" + dbUsername + " /password:" + dbPassword + " /authenticationDatabase:admin /db:" + dbName + " /collection:" + fedID + "." + ext + " /out:" + modelDirectory + "/" + ext + ".json";
-	os.system(cmd)
-
diff --git a/tools/bouncer_worker/src/toyUtils/renameFiles.py b/tools/bouncer_worker/src/toyUtils/renameFiles.py
deleted file mode 100644
index 74bb96795..000000000
--- a/tools/bouncer_worker/src/toyUtils/renameFiles.py
+++ /dev/null
@@ -1,34 +0,0 @@
-#usage: exportToy.py
-
-import sys
-import os
-from os import walk
-
-if len(sys.argv) < 3:
-	print "Error: Not enough parameters."
-	print "Usage: "+ sys.argv[0] +" "
-	sys.exit(1)
-
-fileToModelIDs = sys.argv[1]
-fedID = sys.argv[2]
-
-#Find out the model IDs by reading the model ID file
-fp = open(fileToModelIDs, "r")
-modelList = fp.readlines()
-fp.close()
-
-for model in modelList:
-	model = model.replace('\n', '')
-	modelDirectory = "toy/" + model + "/Laika0115"
-	for (dirpath, dirnames, filenames) in walk(modelDirectory):
-		for filename in filenames:
-			print "Before: " + filename + " after: " + filename.replace(model + ".", "").replace("Laika0115/","")
-			os.rename(modelDirectory+ "/" + filename, modelDirectory + "/" + filename.replace(model + ".", "").replace("Laika0115/",""))
-
-modelDirectory = "toy/" + fedID + "/Laika0115"
-print "fed directory: " + modelDirectory
-for (dirpath, dirnames, filenames) in walk(modelDirectory):
-	for filename in filenames:
-		print "Before: " + filename + " after: " + filename.replace(fedID + ".", "").replace("Laika0115/","")
-		os.rename(modelDirectory+ "/" + filename, modelDirectory + "/" + filename.replace(fedID + ".", "").replace("Laika0115/",""))
-