diff --git a/backend/package-lock.json b/backend/package-lock.json index 0956762d3..04a36baac 100644 --- a/backend/package-lock.json +++ b/backend/package-lock.json @@ -1,12 +1,12 @@ { "name": "seed-test-backend", - "version": "1.7.3", + "version": "1.8.0", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "seed-test-backend", - "version": "1.7.3", + "version": "1.8.0", "dependencies": { "@cucumber/cucumber": "^9.6.0", "adm-zip": "^0.5.16", @@ -16,7 +16,7 @@ "connect-mongo": "^5.1.0", "cors": "^2.8.5", "cucumber-html-reporter": "^7.2.0", - "dotenv": "^16.4.5", + "dotenv": "^16.4.7", "express": "^4.21.1", "express-flash": "0.0.2", "express-session": "^1.18.1", @@ -4231,9 +4231,10 @@ } }, "node_modules/dotenv": { - "version": "16.4.5", - "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.4.5.tgz", - "integrity": "sha512-ZmdL2rui+eB2YwhsWzjInR8LldtZHGDoQ1ugH85ppHKwpUHL7j7rN0Ti9NCnGiQbhaZ11FpR+7ao1dNsmduNUg==", + "version": "16.4.7", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.4.7.tgz", + "integrity": "sha512-47qPchRCykZC03FhkYAhrvwU4xDBFIj1QPqaarj6mdM/hgUzfPHcpkHJOn3mJAufFeeAxAzeGsr5X0M4k6fLZQ==", + "license": "BSD-2-Clause", "engines": { "node": ">=12" }, diff --git a/backend/package.json b/backend/package.json index 1495f530a..205a42e3c 100644 --- a/backend/package.json +++ b/backend/package.json @@ -25,7 +25,7 @@ "connect-mongo": "^5.1.0", "cors": "^2.8.5", "cucumber-html-reporter": "^7.2.0", - "dotenv": "^16.4.5", + "dotenv": "^16.4.7", "express": "^4.21.1", "express-flash": "0.0.2", "express-session": "^1.18.1", diff --git a/backend/src/database/DbServices.js b/backend/src/database/DbServices.js index 2b56fcf38..9a33ae9b9 100644 --- a/backend/src/database/DbServices.js +++ b/backend/src/database/DbServices.js @@ -7,45 +7,44 @@ /* eslint-disable max-len */ /* eslint-disable no-underscore-dangle */ /* eslint-disable no-unused-vars */ -const { ObjectId } = require("mongodb"); -const str = require("string-to-stream"); -const toString = require("stream-to-string"); -const assert = require("assert"); -const mongodb = require("mongodb"); -const fs = require("fs"); -const Collection = require("mongodb/lib/collection"); -const dbConnection = require("./DbConnector"); -const emptyStory = require("../models/emptyStory"); -const emptyScenario = require("../models/emptyScenario"); -const emptyBackground = require("../models/emptyBackground"); -const AdmZip = require("adm-zip"); - -if (process.env.NODE_ENV !== "production") { - require("dotenv").config(); -} - -const userCollection = "User"; -const storiesCollection = "Stories"; -const repositoriesCollection = "Repositories"; -const stepTypesCollection = "stepTypes"; -const PwResetReqCollection = "PwResetRequests"; -const CustomBlocksCollection = "CustomBlocks"; -const WorkgroupsCollection = "Workgroups"; -const ReportDataCollection = "ReportData"; -const ReportsCollection = "Reports"; +const { ObjectId } = require('mongodb'); +const str = require('string-to-stream'); +const toString = require('stream-to-string'); +const assert = require('assert'); +const mongodb = require('mongodb'); +const fs = require('fs'); +const Collection = require('mongodb/lib/collection'); +const AdmZip = require('adm-zip'); +const os = require('os'); +const dbConnection = require('./DbConnector'); +const emptyStory = require('../models/emptyStory'); +const emptyScenario = require('../models/emptyScenario'); +const emptyBackground = require('../models/emptyBackground'); + +if (process.env.NODE_ENV !== 'production') { + 
require('dotenv').config(); +} + +const userCollection = 'User'; +const storiesCollection = 'Stories'; +const repositoriesCollection = 'Repositories'; +const stepTypesCollection = 'stepTypes'; +const PwResetReqCollection = 'PwResetRequests'; +const CustomBlocksCollection = 'CustomBlocks'; +const WorkgroupsCollection = 'Workgroups'; +const ReportDataCollection = 'ReportData'; +const ReportsCollection = 'Reports'; // TODO: die eigene Methode replace kann oft durch die MongoMethode findOneAndReplace ersetzt werden! // Opening a pooling Database Connection via DbConnector dbConnection - .establishConnection() - .then(() => - console.log( - "\x1b[32m%s\x1b[0m \x1b[33m%s\x1b[0m", - "Connected to database @", - dbConnection.getConnection().client.s.options.srvHost - ) - ); + .establishConnection() + .then(() => console.log( + '\x1b[32m%s\x1b[0m \x1b[33m%s\x1b[0m', + 'Connected to database @', + dbConnection.getConnection().client.s.options.srvHost + )); /** * Writes a PasswordResetRequest in the DB @@ -53,13 +52,13 @@ dbConnection * @returns inserted request */ async function createResetRequest(request) { - try { - const db = dbConnection.getConnection(); - return await db.collection(PwResetReqCollection).insertOne(request); - } catch (e) { - console.log(`ERROR im ResetRequest: ${e}`); - throw e; - } + try { + const db = dbConnection.getConnection(); + return await db.collection(PwResetReqCollection).insertOne(request); + } catch (e) { + console.error(`ERROR im ResetRequest: ${e}`); + throw e; + } } /** @@ -68,14 +67,14 @@ async function createResetRequest(request) { * @returns PasswordResetRequest */ async function getResetRequest(id) { - try { - const db = dbConnection.getConnection(); - const query = { uuid: id.toString() }; - return await db.collection(PwResetReqCollection).findOne(query); - } catch (e) { - console.log(`ERROR in getResetRequest: ${e}`); - throw e; - } + try { + const db = dbConnection.getConnection(); + const query = { uuid: id.toString() }; + return await db.collection(PwResetReqCollection).findOne(query); + } catch (e) { + console.error(`ERROR in getResetRequest: ${e}`); + throw e; + } } /** @@ -84,14 +83,14 @@ async function getResetRequest(id) { * @returns PasswordResetRequest */ async function getResetRequestByEmail(mail) { - try { - const db = dbConnection.getConnection(); - const query = { email: mail.toString() }; - return await db.collection(PwResetReqCollection).findOne(query); - } catch (e) { - console.log(`ERROR in getResetRequestByEmail: ${e}`); - throw e; - } + try { + const db = dbConnection.getConnection(); + const query = { email: mail.toString() }; + return await db.collection(PwResetReqCollection).findOne(query); + } catch (e) { + console.error(`ERROR in getResetRequestByEmail: ${e}`); + throw e; + } } /** @@ -100,14 +99,14 @@ async function getResetRequestByEmail(mail) { * @returns deletion Report */ async function deleteRequest(mail) { - try { - const db = dbConnection.getConnection(); - const query = { email: mail.toString() }; - return await db.collection(PwResetReqCollection).deleteOne(query); - } catch (e) { - console.log(`ERROR in deleteRequest: ${e}`); - throw e; - } + try { + const db = dbConnection.getConnection(); + const query = { email: mail.toString() }; + return await db.collection(PwResetReqCollection).deleteOne(query); + } catch (e) { + console.error(`ERROR in deleteRequest: ${e}`); + throw e; + } } /** @@ -116,15 +115,15 @@ async function deleteRequest(mail) { * @returns User */ async function getUserById(userId) { - try { - const db = 
dbConnection.getConnection(); - return await db - .collection(userCollection) - .findOne({ _id: new ObjectId(userId) }); - } catch (e) { - console.log(`ERROR in getUserById: ${e}`); - throw e; - } + try { + const db = dbConnection.getConnection(); + return await db + .collection(userCollection) + .findOne({ _id: new ObjectId(userId) }); + } catch (e) { + console.error(`ERROR in getUserById: ${e}`); + throw e; + } } /** @@ -134,29 +133,29 @@ async function getUserById(userId) { * @returns User */ async function getUserByGithub(login, id) { - try { - const db = dbConnection.getConnection(); - if (typeof id === "number") { - const query = { - $and: [{ "github.id": id }, { "github.login": login.toString() }], - }; - return db.collection(userCollection).findOne(query); - } - } catch (e) { - console.log(`ERROR in getUserByGithub: ${e}`); - throw e; - } + try { + const db = dbConnection.getConnection(); + if (typeof id === 'number') { + const query = { + $and: [{ 'github.id': id }, { 'github.login': login.toString() }] + }; + return db.collection(userCollection).findOne(query); + } + } catch (e) { + console.error(`ERROR in getUserByGithub: ${e}`); + throw e; + } } async function getUserByEmail(email) { - try { - const db = dbConnection.getConnection(); - const query = { email: email.toString() }; - return await db.collection(userCollection).findOne(query); - } catch (e) { - console.log(`ERROR in getUserByEmail: ${e}`); - throw e; - } + try { + const db = dbConnection.getConnection(); + const query = { email: email.toString() }; + return await db.collection(userCollection).findOne(query); + } catch (e) { + console.error(`ERROR in getUserByEmail: ${e}`); + throw e; + } } /** @@ -165,28 +164,28 @@ async function getUserByEmail(email) { * @returns */ async function registerUser(user) { - try { - const db = dbConnection.getConnection(); - const collection = await db.collection(userCollection); - const dbUser = await getUserByEmail(user.email); - let result; - if (dbUser !== null) throw Error("User already exists"); - else if (user.userId) { - // update in register? attacker with userId could re-set anything - result = await collection.updateOne( - { _id: new ObjectId(user.userId) }, - { $set: { email: user.email, password: user.password } } - ); - } else { - delete user.userId; - const query = { email: user.email.toString(), password: user.password }; - result = await collection.insertOne(query); - } - return result; - } catch (e) { - console.log(`ERROR in registerUser: ${e}`); - throw e; - } + try { + const db = dbConnection.getConnection(); + const collection = await db.collection(userCollection); + const dbUser = await getUserByEmail(user.email); + let result; + if (dbUser !== null) throw Error('User already exists'); + else if (user.userId) { + // update in register? 
attacker with userId could re-set anything + result = await collection.updateOne( + { _id: new ObjectId(user.userId) }, + { $set: { email: user.email, password: user.password } } + ); + } else { + delete user.userId; + const query = { email: user.email.toString(), password: user.password }; + result = await collection.insertOne(query); + } + return result; + } catch (e) { + console.error(`ERROR in registerUser: ${e}`); + throw e; + } } /** @@ -195,14 +194,14 @@ async function registerUser(user) { * @returns */ async function registerGithubUser(user) { - try { - const db = dbConnection.getConnection(); - user = mongoSanitize(user); - return await db.collection(userCollection).insertOne({ github: user }); - } catch (e) { - console.log(`ERROR in registerGithubUser: ${e}`); - throw e; - } + try { + const db = dbConnection.getConnection(); + user = mongoSanitize(user); + return await db.collection(userCollection).insertOne({ github: user }); + } catch (e) { + console.error(`ERROR in registerGithubUser: ${e}`); + throw e; + } } /** @@ -212,54 +211,53 @@ async function registerGithubUser(user) { * @returns */ function replaceUser(newUser, collection) { - const myObjt = { _id: new ObjectId(newUser._id) }; - return collection.findOneAndReplace(myObjt, newUser); + const myObjt = { _id: new ObjectId(newUser._id) }; + return collection.findOneAndReplace(myObjt, newUser); } async function updateGithubToken(objId, updatedToken) { - try { - const db = dbConnection.getConnection(); - return await db - .collection(userCollection) - .updateOne( - { _id: new ObjectId(objId) }, - { $set: { "github.githubToken": updatedToken } } - ); - } catch (e) { - console.log(`ERROR in updateGithubToken: ${e}`); - throw e; - } + try { + const db = dbConnection.getConnection(); + return await db + .collection(userCollection) + .updateOne( + { _id: new ObjectId(objId) }, + { $set: { 'github.githubToken': updatedToken } } + ); + } catch (e) { + console.error(`ERROR in updateGithubToken: ${e}`); + throw e; + } } async function findOrRegisterGithub(user) { - let result = await getUserByGithub(user.login, user.id); - if (!result) result = await registerGithubUser(user); - else result = await updateGithubToken(result._id, user.githubToken); - return result; + let result = await getUserByGithub(user.login, user.id); + if (!result) result = await registerGithubUser(user); + else result = await updateGithubToken(result._id, user.githubToken); + return result; } async function mergeGithub(userId, login, id) { - try { - const db = dbConnection.getConnection(); - const collection = await db.collection(userCollection); - const githubAccount = await getUserByGithub(login, id); - const seedAccount = await getUserById(userId); - seedAccount.github = githubAccount.github; - if ( - githubAccount.hasOwnProperty("jira") && - !seedAccount.hasOwnProperty("jira") - ) - seedAccount.jira = githubAccount.jira; - - if (githubAccount.email) { - delete githubAccount.github; - await replaceUser(githubAccount, collection); - } else await deleteUser(githubAccount._id); - return await replaceUser(seedAccount, collection); - } catch (e) { - console.log(`ERROR in mergeGithub: ${e}`); - throw e; - } + try { + const db = dbConnection.getConnection(); + const collection = await db.collection(userCollection); + const githubAccount = await getUserByGithub(login, id); + const seedAccount = await getUserById(userId); + seedAccount.github = githubAccount.github; + if ( + githubAccount.hasOwnProperty('jira') + && !seedAccount.hasOwnProperty('jira') + ) 
seedAccount.jira = githubAccount.jira; + + if (githubAccount.email) { + delete githubAccount.github; + await replaceUser(githubAccount, collection); + } else await deleteUser(githubAccount._id); + return await replaceUser(seedAccount, collection); + } catch (e) { + console.error(`ERROR in mergeGithub: ${e}`); + throw e; + } } /** @@ -269,8 +267,8 @@ async function mergeGithub(userId, login, id) { * @param collection */ function findStory(storyId, collection) { - const id = new ObjectId(storyId); - return collection.findOne({ _id: id }); + const id = new ObjectId(storyId); + return collection.findOne({ _id: id }); } /** @@ -280,199 +278,203 @@ function findStory(storyId, collection) { * @returns */ function replace(story, collection) { - const filter = { - _id: new ObjectId(story._id.toString()), - }; - story._id = new ObjectId(story._id); - return collection.findOneAndReplace(filter, story); + const filter = { + _id: new ObjectId(story._id.toString()) + }; + story._id = new ObjectId(story._id); + return collection.findOneAndReplace(filter, story); } async function disconnectGithub(user) { - try { - const db = dbConnection.getConnection(); - const collection = await db.collection(userCollection); - return await replaceUser(user, collection); - } catch (e) { - console.log(`ERROR in disconnectGithub: ${e}`); - throw e; - } + try { + const db = dbConnection.getConnection(); + const collection = await db.collection(userCollection); + return await replaceUser(user, collection); + } catch (e) { + console.error(`ERROR in disconnectGithub: ${e}`); + throw e; + } } async function updateStory( - updatedStory, - session = undefined, - client = undefined + updatedStory, + session = undefined, + client = undefined ) { - try { - const db = session - ? client.db('Seed', session) - : dbConnection.getConnection(); - const collection = await db.collection(storiesCollection); - updatedStory._id = new ObjectId(updatedStory._id.toString()); - return await collection.findOneAndReplace( - { _id: new ObjectId(updatedStory._id.toString()) }, - updatedStory, - { returnDocument: "after", session: session || undefined } - ); - } catch (e) { - console.log(`ERROR updateStory: ${e}`); - throw e; - } + try { + const db = session + ? client.db('Seed', session) + : dbConnection.getConnection(); + const collection = await db.collection(storiesCollection); + updatedStory._id = new ObjectId(updatedStory._id.toString()); + return await collection.findOneAndReplace( + { _id: new ObjectId(updatedStory._id.toString()) }, + updatedStory, + { returnDocument: 'after', session: session || undefined } + ); + } catch (e) { + console.error(`ERROR updateStory: ${e}`); + throw e; + } } // get One Story // searches the story either by mongoDB _id:new ObjectId() or by story_id (from GitHub or Jira) async function getOneStory(storyId) { - let query; - try { - const db = dbConnection.getConnection(); - const collection = await db.collection(storiesCollection); - console.log(storyId); - if (typeof storyId === "number") { - query = { - story_id: storyId, - }; - } else { - query = { - _id: new ObjectId(storyId.toString()), - }; - } - return await collection.findOne(query); - } catch (e) { - // console.warn(`ERROR in getOneStory: ${e}`); - // throw e; - // if there is no Story (e.g. 
if its a new GitHub repo), return null - console.log("if no match return null"); - return null; - } + let query; + try { + const db = dbConnection.getConnection(); + const collection = await db.collection(storiesCollection); + if (typeof storyId === 'number') { + query = { + story_id: storyId + }; + } else { + query = { + _id: new ObjectId(storyId.toString()) + }; + } + return await collection.findOne(query); + } catch (e) { + // console.warn(`ERROR in getOneStory: ${e}`); + // throw e; + // if there is no Story (e.g. if its a new GitHub repo), return null + console.log('if no match return null'); + return null; + } } async function createStoryGroup( - repoID, - name, - members, - sequence, - session = undefined, - client = undefined + repoID, + name, + members, + sequence, + session = undefined, + client = undefined, + xayTestSet = false ) { - try { - const db = session - ? client.db('Seed', session) - : dbConnection.getConnection(); - const groups = await db.collection(repositoriesCollection).findOneAndUpdate( - { _id: new ObjectId(repoID) }, - { - $push: { - groups: { - _id: new ObjectId(), - name, - member_stories: members, - isSequential: sequence, - }, - }, - }, - { upsert: true, projection: { groups: 1 }, session: session || undefined } - ); - return groups.groups.slice(-1)._id; - } catch (e) { - console.log(`ERROR in createStoryGroup: ${e}`); - } + try { + const db = session + ? client.db('Seed', session) + : dbConnection.getConnection(); + const group = { + _id: new ObjectId(), + name, + member_stories: members, + isSequential: sequence + }; + if (xayTestSet) { + group.xayTestSet = true; + } + const groups = await db.collection(repositoriesCollection).findOneAndUpdate( + { _id: new ObjectId(repoID) }, + { + $push: { + groups: group + } + }, + { upsert: true, projection: { groups: 1 }, session: session || undefined } + ); + return groups.groups.slice(-1)._id; + } catch (e) { + console.error(`ERROR in createStoryGroup: ${e}`); + } } async function updateStoryGroup( - repoId, - groupId, - updatedGroup, - session = undefined, - client = undefined + repoId, + groupId, + updatedGroup, + session = undefined, + client = undefined ) { - try { - const db = session - ? client.db('Seed', session) - : dbConnection.getConnection(); - updatedGroup._id = new ObjectId(updatedGroup._id); - const collection = await db.collection(repositoriesCollection); - const repo = await collection.findOne({ _id: new ObjectId(repoId) }); - // leave with double equal: - const index = repo.groups.findIndex((o) => o._id == groupId); - repo.groups[index] = updatedGroup; - await collection.updateOne({ _id: new ObjectId(repoId) }, { $set: repo }, { session: session || undefined }); - return updatedGroup; - } catch (e) { - console.log(`ERROR in updateStoryGroup: ${e}`); - } + try { + const db = session + ? 
client.db('Seed', session) + : dbConnection.getConnection(); + updatedGroup._id = new ObjectId(updatedGroup._id); + const collection = await db.collection(repositoriesCollection); + const repo = await collection.findOne({ _id: new ObjectId(repoId) }); + // leave with double equal: + const index = repo.groups.findIndex((o) => o._id == groupId); + repo.groups[index] = updatedGroup; + await collection.updateOne({ _id: new ObjectId(repoId) }, { $set: repo }, { session: session || undefined }); + return updatedGroup; + } catch (e) { + console.error(`ERROR in updateStoryGroup: ${e}`); + } } async function deleteStoryGroup(repoId, groupId) { - try { - const db = dbConnection.getConnection(); - const collection = await db.collection(repositoriesCollection); - const repo = await collection.findOne({ _id: new ObjectId(repoId) }); - // leave with double equal: - const index = repo.groups.findIndex((o) => o._id == groupId); - repo.groups.splice(index, 1); - await collection.updateOne({ _id: new ObjectId(repoId) }, { $set: repo }); - return null; - } catch (e) { - console.log(`ERROR in deleteStoryGroup: ${e}`); - } + try { + const db = dbConnection.getConnection(); + const collection = await db.collection(repositoriesCollection); + const repo = await collection.findOne({ _id: new ObjectId(repoId) }); + // leave with double equal: + const index = repo.groups.findIndex((o) => o._id == groupId); + repo.groups.splice(index, 1); + await collection.updateOne({ _id: new ObjectId(repoId) }, { $set: repo }); + return null; + } catch (e) { + console.error(`ERROR in deleteStoryGroup: ${e}`); + } } async function getAllStoryGroups(repoId) { - try { - const db = dbConnection.getConnection(); - return await db - .collection(repositoriesCollection) - .findOne({ _id: new ObjectId(repoId) }, { projection: { groups: 1 } }); - } catch (e) { - console.log(`ERROR in getAllStoryGroups: ${e}`); - } + try { + const db = dbConnection.getConnection(); + return await db + .collection(repositoriesCollection) + .findOne({ _id: new ObjectId(repoId) }, { projection: { groups: 1 } }); + } catch (e) { + console.error(`ERROR in getAllStoryGroups: ${e}`); + } } async function getOneStoryGroup(repoId, groupId) { - try { - const groups = await getAllStoryGroups(repoId); - return groups.groups.find((o) => o._id == groupId); - } catch (e) { - console.log(`ERROR in getOneStoryGroup: ${e}`); - } + try { + const groups = await getAllStoryGroups(repoId); + return groups.groups.find((o) => o._id == groupId); + } catch (e) { + console.error(`ERROR in getOneStoryGroup: ${e}`); + } } async function addToStoryGroup(repoId, groupId, storyId) { - try { - const group = await getOneStoryGroup(repoId, groupId); - group.member_stories.push(storyId); - await updateStoryGroup(repoId, groupId, group); - return group; - } catch (e) { - console.log(`ERROR in AddToStoryGroup: ${e}`); - } + try { + const group = await getOneStoryGroup(repoId, groupId); + group.member_stories.push(storyId); + await updateStoryGroup(repoId, groupId, group); + return group; + } catch (e) { + console.error(`ERROR in AddToStoryGroup: ${e}`); + } } async function removeFromStoryGroup(repoId, groupId, storyId) { - try { - const group = await getOneStoryGroup(repoId, groupId); - group.member_stories.splice(group.indexOf(storyId), 1); - await updateStoryGroup(repoId, groupId, group); - return group; - } catch (e) { - console.log(`ERROR in removeFromStoryGroup: ${e}`); - } + try { + const group = await getOneStoryGroup(repoId, groupId); + 
group.member_stories.splice(group.indexOf(storyId), 1); + await updateStoryGroup(repoId, groupId, group); + return group; + } catch (e) { + console.error(`ERROR in removeFromStoryGroup: ${e}`); + } } async function updateStoryGroupsArray(repoId, groupsArray) { - try { - const db = dbConnection.getConnection(); - return await db - .collection(repositoriesCollection) - .findOneAndUpdate( - { _id: new ObjectId(repoId) }, - { $set: { groups: groupsArray } }, - { projection: { groups: 1 } } - ); - } catch (e) { - console.log(`ERROR in updateStoryGroupsArray: ${e}`); - } + try { + const db = dbConnection.getConnection(); + return await db + .collection(repositoriesCollection) + .findOneAndUpdate( + { _id: new ObjectId(repoId) }, + { $set: { groups: groupsArray } }, + { projection: { groups: 1 } } + ); + } catch (e) { + console.error(`ERROR in updateStoryGroupsArray: ${e}`); + } } /** @@ -480,73 +482,73 @@ async function updateStoryGroupsArray(repoId, groupsArray) { * @returns all StepTypeObjects */ async function showSteptypes() { - try { - const db = dbConnection.getConnection(); - return await db.collection(stepTypesCollection).find({}).toArray(); - } catch (e) { - console.log(`ERROR in showSteptypes: ${e}`); - throw e; - } + try { + const db = dbConnection.getConnection(); + return await db.collection(stepTypesCollection).find({}) + .toArray(); + } catch (e) { + console.error(`ERROR in showSteptypes: ${e}`); + throw e; + } } // UPDATE Background async function updateBackground(storyId, updatedBackground) { - try { - const db = dbConnection.getConnection(); - const collection = await db.collection(storiesCollection); - return collection.findOneAndUpdate( - { _id: new ObjectId(storyId) }, - { $set: { background: updatedBackground } }, - { returnDocument: "after", upsert: true } - ); - } catch (e) { - console.log(`ERROR in updateBackground: ${e}`); - throw e; - } + try { + const db = dbConnection.getConnection(); + const collection = await db.collection(storiesCollection); + return collection.findOneAndUpdate( + { _id: new ObjectId(storyId) }, + { $set: { background: updatedBackground } }, + { returnDocument: 'after', upsert: true } + ); + } catch (e) { + console.error(`ERROR in updateBackground: ${e}`); + throw e; + } } // DELETE Background async function deleteBackground(storyId) { - return updateBackground(storyId, emptyBackground()); + return updateBackground(storyId, emptyBackground()); } async function createStory( - storyTitle, - storyDescription, - repoId, - session = undefined, - client = undefined + storyTitle, + storyDescription, + repoId, + session = undefined, + client = undefined ) { - const iNumberArray = []; - let finalIssueNumber = 1; - try { - console.log(session); - console.log(client); - const db = session - ? 
client.db('Seed', session) - : dbConnection.getConnection(); - const repo = await db - .collection(repositoriesCollection) - .findOne({ _id: new ObjectId(repoId) }); - if (repo && repo.stories.length > 0) { - for (const storyId of repo.stories) { - const story = await db - .collection(storiesCollection) - .findOne({ _id: new ObjectId(storyId) }); - iNumberArray.push(story.issue_number); - } - finalIssueNumber = - iNumberArray.findIndex((_, i) => !iNumberArray.includes(i + 1)) + 1; - if (finalIssueNumber === 0) finalIssueNumber = iNumberArray.length + 1; - } - const story = emptyStory(storyTitle, storyDescription); - story.issue_number = finalIssueNumber; - const result = await db.collection(storiesCollection).insertOne(story, { session: session || undefined }); - return result.insertedId; - } catch (e) { - console.log(`ERROR in createStory: ${e}`); - throw e; - } + const iNumberArray = []; + let finalIssueNumber = 1; + try { + console.log(session); + console.log(client); + const db = session + ? client.db('Seed', session) + : dbConnection.getConnection(); + const repo = await db + .collection(repositoriesCollection) + .findOne({ _id: new ObjectId(repoId) }); + if (repo && repo.stories.length > 0) { + for (const storyId of repo.stories) { + const story = await db + .collection(storiesCollection) + .findOne({ _id: new ObjectId(storyId) }); + iNumberArray.push(story.issue_number); + } + finalIssueNumber = iNumberArray.findIndex((_, i) => !iNumberArray.includes(i + 1)) + 1; + if (finalIssueNumber === 0) finalIssueNumber = iNumberArray.length + 1; + } + const story = emptyStory(storyTitle, storyDescription); + story.issue_number = finalIssueNumber; + const result = await db.collection(storiesCollection).insertOne(story, { session: session || undefined }); + return result.insertedId; + } catch (e) { + console.error(`ERROR in createStory: ${e}`); + throw e; + } } /** @@ -556,157 +558,155 @@ async function createStory( * @returns deleteReport */ async function deleteStory(repoId, storyId) { - // TODO refactor use promise all - try { - const db = dbConnection.getConnection(); - const repo = await db.collection(repositoriesCollection); - try { - const groups = await repo.findOne( - { _id: new ObjectId(repoId) }, - { projection: { groups: 1 } } - ); - for (const index in groups.groups) - groups.groups[index].member_stories = groups.groups[ - index - ].member_stories.filter((story) => story !== storyId); - await repo.findOneAndUpdate( - { _id: new ObjectId(repoId) }, - { $set: { groups: groups.groups } } - ); - try { - await repo.findOneAndUpdate( - { _id: new ObjectId(repoId) }, - { $pull: { stories: new ObjectId(storyId) } } - ); - try { - return await db - .collection(storiesCollection) - .findOneAndDelete({ _id: new ObjectId(storyId) }); - } catch (e) { - console.log( - `ERROR in deleteStory, couldn't delete the Story. Trying to recreate the Repo- and GroupsEntry: ${e}` - ); - // TODO: recreate both Entrys - } - } catch (e) { - console.log( - `ERROR in deleteStory, couldn't delete the Story_id in the Repo. 
Trying to recreate the deleted GroupEntry : ${e}` - ); - // TODO: recreate the GroupEntry - } - } catch (e) { - console.log(`ERROR in deleteStory, couldn't delete GroupEntry: ${e}`); - throw e; - } - } catch (e) { - console.log(`ERROR in deleteStory, couldnt establish a Connection: ${e}`); - throw e; - } + // TODO refactor use promise all + try { + const db = dbConnection.getConnection(); + const repo = await db.collection(repositoriesCollection); + try { + const groups = await repo.findOne( + { _id: new ObjectId(repoId) }, + { projection: { groups: 1 } } + ); + for (const index in groups.groups) groups.groups[index].member_stories = groups.groups[ + index + ].member_stories.filter((story) => story !== storyId); + await repo.findOneAndUpdate( + { _id: new ObjectId(repoId) }, + { $set: { groups: groups.groups } } + ); + try { + await repo.findOneAndUpdate( + { _id: new ObjectId(repoId) }, + { $pull: { stories: new ObjectId(storyId) } } + ); + try { + return await db + .collection(storiesCollection) + .findOneAndDelete({ _id: new ObjectId(storyId) }); + } catch (e) { + console.log( + `ERROR in deleteStory, couldn't delete the Story. Trying to recreate the Repo- and GroupsEntry: ${e}` + ); + // TODO: recreate both Entrys + } + } catch (e) { + console.log( + `ERROR in deleteStory, couldn't delete the Story_id in the Repo. Trying to recreate the deleted GroupEntry : ${e}` + ); + // TODO: recreate the GroupEntry + } + } catch (e) { + console.error(`ERROR in deleteStory, couldn't delete GroupEntry: ${e}`); + throw e; + } + } catch (e) { + console.error(`ERROR in deleteStory, couldnt establish a Connection: ${e}`); + throw e; + } } async function insertStoryIdIntoRepo( - storyId, - repoId, - session = undefined, - client = undefined + storyId, + repoId, + session = undefined, + client = undefined ) { - try { - const db = session - ? client.db('Seed', session) - : dbConnection.getConnection(); - return await db - .collection(repositoriesCollection) - .findOneAndUpdate( - { _id: new ObjectId(repoId) }, - { $push: { stories: new ObjectId(storyId) } }, - { session: session || undefined } - ); - } catch (e) { - console.log(`ERROR in insertStoryIdIntoRepo: ${e}`); - throw e; - } + try { + const db = session + ? 
client.db('Seed', session) + : dbConnection.getConnection(); + return await db + .collection(repositoriesCollection) + .findOneAndUpdate( + { _id: new ObjectId(repoId) }, + { $push: { stories: new ObjectId(storyId) } }, + { session: session || undefined } + ); + } catch (e) { + console.error(`ERROR in insertStoryIdIntoRepo: ${e}`); + throw e; + } } async function updateScenarioList(storyId, scenarioList) { - try { - const db = dbConnection.getConnection(); - return await db - .collection(storiesCollection) - .findOneAndUpdate( - { _id: new ObjectId(storyId) }, - { $set: { scenarios: scenarioList } } - ); - } catch (e) { - console.log(`ERROR in insertStoryIdIntoRepo: ${e}`); - throw e; - } + try { + const db = dbConnection.getConnection(); + return await db + .collection(storiesCollection) + .findOneAndUpdate( + { _id: new ObjectId(storyId) }, + { $set: { scenarios: scenarioList } } + ); + } catch (e) { + console.error(`ERROR in insertStoryIdIntoRepo: ${e}`); + throw e; + } } async function getAllStoriesOfRepo(repoId) { - const storiesArray = []; - try { - const db = dbConnection.getConnection(); - const repo = await db - .collection(repositoriesCollection) - .findOne({ _id: new ObjectId(repoId) }); - if (repo) - for (const entry of repo.stories) { - const story = await db - .collection(storiesCollection) - .findOne({ _id: new ObjectId(entry) }); - storiesArray.push(story); - } - return storiesArray; - } catch (e) { - console.log(`ERROR in getAllStoriesOfRepo: ${e}`); - throw e; - } + const storiesArray = []; + try { + const db = dbConnection.getConnection(); + const repo = await db + .collection(repositoriesCollection) + .findOne({ _id: new ObjectId(repoId) }); + if (repo) for (const entry of repo.stories) { + const story = await db + .collection(storiesCollection) + .findOne({ _id: new ObjectId(entry) }); + storiesArray.push(story); + } + return storiesArray; + } catch (e) { + console.error(`ERROR in getAllStoriesOfRepo: ${e}`); + throw e; + } } // GET ONE Scenario async function getOneScenario(storyId, scenarioId) { - try { - const db = dbConnection.getConnection(); - const scenarios = await db - .collection(storiesCollection) - .findOne( - { _id: new ObjectId(storyId), "scenarios.scenario_id": scenarioId }, - { projection: { scenarios: 1 } } - ); - return scenarios.scenarios.find((o) => o.scenario_id === scenarioId); - } catch (e) { - console.log(`ERROR in getOneScenario: ${e}`); - throw e; - } + try { + const db = dbConnection.getConnection(); + const scenarios = await db + .collection(storiesCollection) + .findOne( + { _id: new ObjectId(storyId), 'scenarios.scenario_id': scenarioId }, + { projection: { scenarios: 1 } } + ); + return scenarios.scenarios.find((o) => o.scenario_id === scenarioId); + } catch (e) { + console.error(`ERROR in getOneScenario: ${e}`); + throw e; + } } // CREATE Scenario async function createScenario(storyId, scenarioTitle) { - try { - const db = dbConnection.getConnection(); - const collection = await db.collection(storiesCollection); - const story = await findStory(storyId, collection); - const tmpScenario = emptyScenario(); - if (story.scenarios.length === 0) { - tmpScenario.name = scenarioTitle; - story.scenarios.push(tmpScenario); - } else { - let newScenId = 0; - for (const scenario of story.scenarios) { - if (scenario.scenario_id > newScenId) { - newScenId = scenario.scenario_id; - } - } - tmpScenario.scenario_id = newScenId + 1; - tmpScenario.name = scenarioTitle; - story.scenarios.push(tmpScenario); - } - await replace(story, collection); - return 
tmpScenario; - } catch (e) { - console.log(`ERROR in createScenario: ${e}`); - throw e; - } + try { + const db = dbConnection.getConnection(); + const collection = await db.collection(storiesCollection); + const story = await findStory(storyId, collection); + const tmpScenario = emptyScenario(); + if (story.scenarios.length === 0) { + tmpScenario.name = scenarioTitle; + story.scenarios.push(tmpScenario); + } else { + let newScenId = 0; + for (const scenario of story.scenarios) { + if (scenario.scenario_id > newScenId) { + newScenId = scenario.scenario_id; + } + } + tmpScenario.scenario_id = newScenId + 1; + tmpScenario.name = scenarioTitle; + story.scenarios.push(tmpScenario); + } + await replace(story, collection); + return tmpScenario; + } catch (e) { + console.error(`ERROR in createScenario: ${e}`); + throw e; + } } // PUT Scenario @@ -717,146 +717,144 @@ async function createScenario(storyId, scenarioTitle) { * @returns updated Scenario */ async function updateScenario(storyId, updatedScenario) { - try { - const db = dbConnection.getConnection(); - const collection = await db.collection(storiesCollection); - return collection - .findOneAndUpdate( - { _id: new ObjectId(storyId) }, - { $set: { "scenarios.$[it]": updatedScenario } }, - { - arrayFilters: [{ "it.scenario_id": updatedScenario.scenario_id }], - returnDocument: "after", - upsert: true, - projection: { scenarios: true }, - } - ) // Options - .then((result) => - result.scenarios.find( - (scen) => scen.scenario_id == updatedScenario.scenario_id - ) - ); - } catch (e) { - console.log(`ERROR in updateScenario: ${e}`); - throw e; - } + try { + const db = dbConnection.getConnection(); + const collection = await db.collection(storiesCollection); + return collection + .findOneAndUpdate( + { _id: new ObjectId(storyId) }, + { $set: { 'scenarios.$[it]': updatedScenario } }, + { + arrayFilters: [{ 'it.scenario_id': updatedScenario.scenario_id }], + returnDocument: 'after', + upsert: true, + projection: { scenarios: true } + } + ) // Options + .then((result) => result.scenarios.find( + (scen) => scen.scenario_id == updatedScenario.scenario_id + )); + } catch (e) { + console.error(`ERROR in updateScenario: ${e}`); + throw e; + } } // DELETE Scenario async function deleteScenario(storyId, scenarioId) { - try { - const db = dbConnection.getConnection(); - const collection = await db.collection(storiesCollection); - return collection - .findOneAndUpdate( - { _id: new ObjectId(storyId) }, - { $pull: { scenarios: { scenario_id: scenarioId } } }, - { returnDocument: "after" } - ) - .then((res) => res); - } catch (e) { - console.log(`ERROR in deleteScenario: ${e}`); - throw e; - } + try { + const db = dbConnection.getConnection(); + const collection = await db.collection(storiesCollection); + return collection + .findOneAndUpdate( + { _id: new ObjectId(storyId) }, + { $pull: { scenarios: { scenario_id: scenarioId } } }, + { returnDocument: 'after' } + ) + .then((res) => res); + } catch (e) { + console.error(`ERROR in deleteScenario: ${e}`); + throw e; + } } // gets all Repositories of one user async function getRepository(userID) { - try { - const myObjt = { owner: new ObjectId(userID) }; - const db = dbConnection.getConnection(); - const wGCollection = await db.collection(WorkgroupsCollection); - const repoCollection = await db.collection(repositoriesCollection); - const user = await db - .collection(userCollection) - .findOne({ _id: new ObjectId(userID) }); - const positiveWorkgroups = await wGCollection - .find({ Members: { $elemMatch: { email: 
user.email, canEdit: true } } }) - .toArray(); - const PWgArray = positiveWorkgroups.map( - (entry) => new ObjectId(entry.Repo) - ); - const PWgRepos = await repoCollection - .find({ _id: { $in: PWgArray } }) - .toArray(); - PWgRepos.forEach((element) => { - element.canEdit = true; - }); - const negativeWorkgroups = await wGCollection - .find({ Members: { $elemMatch: { email: user.email, canEdit: false } } }) - .toArray(); - const NWgArray = negativeWorkgroups.map( - (entry) => new ObjectId(entry.Repo) - ); - const NWgRepos = await repoCollection - .find({ _id: { $in: NWgArray } }) - .toArray(); - NWgRepos.forEach((element) => { - element.canEdit = false; - }); - const result = await repoCollection.find(myObjt).toArray(); - result.forEach((element) => { - element.canEdit = true; - }); - return result.concat(PWgRepos, NWgRepos); - } catch (e) { - console.log(`ERROR in getRepository${e}`); - throw e; - } + try { + const myObjt = { owner: new ObjectId(userID) }; + const db = dbConnection.getConnection(); + const wGCollection = await db.collection(WorkgroupsCollection); + const repoCollection = await db.collection(repositoriesCollection); + const user = await db + .collection(userCollection) + .findOne({ _id: new ObjectId(userID) }); + const positiveWorkgroups = await wGCollection + .find({ Members: { $elemMatch: { email: user.email, canEdit: true } } }) + .toArray(); + const PWgArray = positiveWorkgroups.map( + (entry) => new ObjectId(entry.Repo) + ); + const PWgRepos = await repoCollection + .find({ _id: { $in: PWgArray } }) + .toArray(); + PWgRepos.forEach((element) => { + element.canEdit = true; + }); + const negativeWorkgroups = await wGCollection + .find({ Members: { $elemMatch: { email: user.email, canEdit: false } } }) + .toArray(); + const NWgArray = negativeWorkgroups.map( + (entry) => new ObjectId(entry.Repo) + ); + const NWgRepos = await repoCollection + .find({ _id: { $in: NWgArray } }) + .toArray(); + NWgRepos.forEach((element) => { + element.canEdit = false; + }); + const result = await repoCollection.find(myObjt).toArray(); + result.forEach((element) => { + element.canEdit = true; + }); + return result.concat(PWgRepos, NWgRepos); + } catch (e) { + console.error(`ERROR in getRepository${e}`); + throw e; + } } // deletes all Repositories of own User async function deleteRepositorys(ownerID) { - // TODO: Dringend! Die eingetragenen Storys und die Einträge in Stories und Groups müssen gelöscht werden - try { - const query = { owner: new ObjectId(ownerID) }; - const db = dbConnection.getConnection(); - const collection = await db.collection(repositoriesCollection); - return await collection.deleteMany(query); - } catch (e) { - console.log(`ERROR in deleteRepositorys${e}`); - throw e; - } + // TODO: Dringend! Die eingetragenen Storys und die Einträge in Stories und Groups müssen gelöscht werden + try { + const query = { owner: new ObjectId(ownerID) }; + const db = dbConnection.getConnection(); + const collection = await db.collection(repositoriesCollection); + return await collection.deleteMany(query); + } catch (e) { + console.error(`ERROR in deleteRepositorys${e}`); + throw e; + } } async function deleteRepository(repoId, ownerId) { - // TODO: Dringend! 
Die eingetragenen Storys und die Einträge in Stories und Groups müssen gelöscht werden - try { - // todo delete Workgroup, delete story Reports - const db = dbConnection.getConnection(); - const collectionRepo = await db.collection(repositoriesCollection); - // const collectionStory = await db.collection(storiesCollection) - // const repo = await collectionRepo.findOne({ owner: new ObjectId(ownerId), _id: new ObjectId(repoId)}) - // const storIds = repo.stories.map((val)=>new ObjectId(val)) - // const storiesRes = await collectionStory.deleteMany({_id:{$in: storIds}}) - return collectionRepo.deleteOne({ - owner: new ObjectId(ownerId), - _id: new ObjectId(repoId), - }); - } catch (e) { - console.log(`ERROR in deleteRepository${e}`); - throw e; - } + // TODO: Dringend! Die eingetragenen Storys und die Einträge in Stories und Groups müssen gelöscht werden + try { + // todo delete Workgroup, delete story Reports + const db = dbConnection.getConnection(); + const collectionRepo = await db.collection(repositoriesCollection); + // const collectionStory = await db.collection(storiesCollection) + // const repo = await collectionRepo.findOne({ owner: new ObjectId(ownerId), _id: new ObjectId(repoId)}) + // const storIds = repo.stories.map((val)=>new ObjectId(val)) + // const storiesRes = await collectionStory.deleteMany({_id:{$in: storIds}}) + return collectionRepo.deleteOne({ + owner: new ObjectId(ownerId), + _id: new ObjectId(repoId) + }); + } catch (e) { + console.error(`ERROR in deleteRepository${e}`); + throw e; + } } async function getOneRepository(ownerId, name) { - try { - const repo = { owner: new ObjectId(ownerId), repoName: name }; - const db = dbConnection.getConnection(); - return db.collection(repositoriesCollection).findOne(repo); - } catch (e) { - console.log(`ERROR in getOneRepository${e}`); - } + try { + const repo = { owner: new ObjectId(ownerId), repoName: name }; + const db = dbConnection.getConnection(); + return db.collection(repositoriesCollection).findOne(repo); + } catch (e) { + console.error(`ERROR in getOneRepository${e}`); + } } async function getOneRepositoryById(repoId) { - try { - const repo = { _id: new ObjectId(repoId) }; - const db = dbConnection.getConnection(); - return db.collection(repositoriesCollection).findOne(repo); - } catch (e) { - console.log(`ERROR in getOneRepository${e}`); - } + try { + const repo = { _id: new ObjectId(repoId) }; + const db = dbConnection.getConnection(); + return db.collection(repositoriesCollection).findOne(repo); + } catch (e) { + console.error(`ERROR in getOneRepository${e}`); + } } /** @@ -865,16 +863,16 @@ async function getOneRepositoryById(repoId) { * @returns one GitRepositoryObject */ async function getOneGitRepository(name) { - try { - const query = { - repoName: name.toString(), - repoType: "github", - }; - const db = dbConnection.getConnection(); - return await db.collection(repositoriesCollection).findOne(query); - } catch (e) { - console.log(`ERROR in getOneGitRepository${e}`); - } + try { + const query = { + repoName: name.toString(), + repoType: 'github' + }; + const db = dbConnection.getConnection(); + return await db.collection(repositoriesCollection).findOne(query); + } catch (e) { + console.error(`ERROR in getOneGitRepository${e}`); + } } /** @@ -883,13 +881,13 @@ async function getOneGitRepository(name) { * @returns one JiraRepositoryObject */ async function getOneJiraRepository(name) { - try { - const query = { repoName: name.toString(), repoType: "jira" }; - const db = dbConnection.getConnection(); - return 
await db.collection(repositoriesCollection).findOne(query); - } catch (e) { - console.log(`ERROR in getOneGitRepository${e}`); - } + try { + const query = { repoName: name.toString(), repoType: 'jira' }; + const db = dbConnection.getConnection(); + return await db.collection(repositoriesCollection).findOne(query); + } catch (e) { + console.error(`ERROR in getOneGitRepository${e}`); + } } /** @@ -898,63 +896,68 @@ async function getOneJiraRepository(name) { * @returns all RepositoryObjects from the corresponding source */ async function getAllSourceReposFromDb(source) { - try { - const db = dbConnection.getConnection(); - return await db - .collection(repositoriesCollection) - .find({ repoType: source }) - .toArray(); - } catch (e) { - console.log(`ERROR in getAllSourceReposFromDb ${e}`); - } + try { + const db = dbConnection.getConnection(); + return await db + .collection(repositoriesCollection) + .find({ repoType: source }) + .toArray(); + } catch (e) { + console.error(`ERROR in getAllSourceReposFromDb ${e}`); + } } async function createRepo( - ownerId, - name, - session = undefined, - client = undefined + ownerId, + name, + session = undefined, + client = undefined ) { - try { - const emptyRepo = { - owner: new ObjectId(ownerId), - repoName: name.toString(), - stories: [], - repoType: "db", - customBlocks: [], - groups: [], - }; - const db = session - ? client.db('Seed', session) - : dbConnection.getConnection(); - console.log(session, "In DB: ", db); - const collection = await db.collection(repositoriesCollection); - const query = { owner: new ObjectId(ownerId), repoName: name.toString() }; - const existingRepo = await collection.findOne(query); - if (existingRepo !== null || !name) - return "Sie besitzen bereits ein Repository mit diesem Namen!"; // existing or empty name - return collection.insertOne(emptyRepo, { session: session || undefined }).then((ret) => ret.insertedId); - } catch (e) { - console.log(`ERROR in createRepo${e}`); - } + try { + const emptyRepo = { + owner: new ObjectId(ownerId), + repoName: name.toString(), + stories: [], + repoType: 'db', + customBlocks: [], + groups: [] + }; + const db = session + ? client.db('Seed', session) + : dbConnection.getConnection(); + console.log(session, 'In DB: ', db); + const collection = await db.collection(repositoriesCollection); + const query = { owner: new ObjectId(ownerId), repoName: name.toString() }; + const existingRepo = await collection.findOne(query); + if (existingRepo !== null || !name) return 'Sie besitzen bereits ein Repository mit diesem Namen!'; // existing or empty name + return collection.insertOne(emptyRepo, { session: session || undefined }).then((ret) => ret.insertedId); + } catch (e) { + console.error(`ERROR in createRepo${e}`); + } } async function getRepoSettingsById(repoId) { - try { - const db = dbConnection.getConnection(); - const collection = await db.collection(repositoriesCollection); + if (!repoId || repoId.length !== 24) { + console.error(`Invalid repository ID: ${repoId}. 
Must be a 24-character hex string.`); + return null; + } - const repo = await collection.findOne({ _id: new ObjectId(repoId) }); + try { + const db = dbConnection.getConnection(); + const collection = await db.collection(repositoriesCollection); + + // Safely create ObjectId from validated hex string + const repo = await collection.findOne({ _id: ObjectId.createFromHexString(repoId) }); - if (!repo) { - console.log(`Kein Repository gefunden mit der ID: ${repoId}`); - return null; - } - return repo.settings; - } catch (e) { - console.error(`Fehler beim Abrufen der Repository-Einstellungen: ${e}`); - throw e; - } + if (!repo) { + console.log(`No repository found with the ID: ${repoId}`); + return null; + } + return repo.settings; + } catch (e) { + console.error(`Error retrieving repository settings: ${e}`); + throw e; + } } /** @@ -982,580 +985,576 @@ async function updateRepository(repoID, newName, globalSettings) { repoFilter, { $set: updateFields }, { returnDocument: 'after' } - ) + ); return updatedRepo.value; } catch (e) { - console.log(`ERROR updateRepository: ${e}`); + console.error(`ERROR updateRepository: ${e}`); throw e; } } - async function createJiraRepo(repoName) { - try { - const db = dbConnection.getConnection(); - const repo = { - owner: "", - repoName, - stories: [], - repoType: "jira", - customBlocks: [], - }; - return await db.collection(repositoriesCollection).insertOne(repo); - } catch (e) { - console.log(`ERROR in createJiraRepo ${e}`); - throw e; - } + try { + const db = dbConnection.getConnection(); + const repo = { + owner: '', + repoName, + stories: [], + repoType: 'jira', + customBlocks: [] + }; + return await db.collection(repositoriesCollection).insertOne(repo); + } catch (e) { + console.error(`ERROR in createJiraRepo ${e}`); + throw e; + } } async function createGitRepo(gitOwnerId, repoName, userGithubId, userId) { - let newRepo; - try { - const db = dbConnection.getConnection(); - newRepo = { - owner: "", - gitOwner: gitOwnerId, - repoName, - stories: [], - repoType: "github", - customBlocks: [], - }; - if (userGithubId === gitOwnerId) newRepo.owner = new ObjectId(userId); - return await db.collection(repositoriesCollection).insertOne(newRepo); - } catch (e) { - console.log(`ERROR in createGitRepo${e}`); - throw e; - } + let newRepo; + try { + const db = dbConnection.getConnection(); + newRepo = { + owner: '', + gitOwner: gitOwnerId, + repoName, + stories: [], + repoType: 'github', + customBlocks: [] + }; + if (userGithubId === gitOwnerId) newRepo.owner = new ObjectId(userId); + return await db.collection(repositoriesCollection).insertOne(newRepo); + } catch (e) { + console.error(`ERROR in createGitRepo${e}`); + throw e; + } } async function removeFromWorkgroup(repoId, user) { - try { - const db = dbConnection.getConnection(); - const wGcollection = await db.collection(WorkgroupsCollection); - const repo = await db - .collection(repositoriesCollection) - .findOne({ _id: new ObjectId(repoId) }); - const owner = await db - .collection(userCollection) - .findOne({ _id: repo.owner }); - const workGroup = await wGcollection.findOneAndUpdate( - { Repo: new ObjectId(repoId) }, - { $pull: { Members: { email: user.email } } } - ); - if (workGroup) { - const wG = await wGcollection.findOne({ Repo: new ObjectId(repoId) }); - const result = { owner: {}, member: [] }; - result.owner = { email: owner.email, canEdit: true }; - result.member = wG.Members; - return result; - } - return; - } catch (e) { - console.log(`ERROR in removeFromWorkgroup: ${e}`); - throw e; - } + try { 
+ const db = dbConnection.getConnection(); + const wGcollection = await db.collection(WorkgroupsCollection); + const repo = await db + .collection(repositoriesCollection) + .findOne({ _id: new ObjectId(repoId) }); + const owner = await db + .collection(userCollection) + .findOne({ _id: repo.owner }); + const workGroup = await wGcollection.findOneAndUpdate( + { Repo: new ObjectId(repoId) }, + { $pull: { Members: { email: user.email } } } + ); + if (workGroup) { + const wG = await wGcollection.findOne({ Repo: new ObjectId(repoId) }); + const result = { owner: {}, member: [] }; + result.owner = { email: owner.email, canEdit: true }; + result.member = wG.Members; + return result; + } + } catch (e) { + console.error(`ERROR in removeFromWorkgroup: ${e}`); + throw e; + } } async function updateOwnerInRepo(repoId, newOwnerId, oldOwnerId) { - try { - const db = dbConnection.getConnection(); - const oldOwner = await getUserById(oldOwnerId); - // set new Owner for the given Repo - const newOwner = await getUserById(newOwnerId); - await db - .collection(repositoriesCollection) - .findOne({ _id: new ObjectId(repoId) }); - await db - .collection(repositoriesCollection) - .findOneAndUpdate( - { _id: new ObjectId(repoId) }, - { $set: { owner: newOwnerId } } - ); - // remove the new Owner from Workgroup - await removeFromWorkgroup(repoId, newOwner); - - // add old Owner as Member and update Email in Workgroup - const wgMember = { email: oldOwner.email, canEdit: Boolean(true) }; - await db - .collection(WorkgroupsCollection) - .findOneAndUpdate( - { Repo: new ObjectId(repoId) }, - { $set: { owner: newOwner.email }, $push: { Members: wgMember } } - ); - return "Success"; - } catch (e) { - console.log(`ERROR in updateOwnerInRepo ${e}`); - throw e; - } + try { + const db = dbConnection.getConnection(); + const oldOwner = await getUserById(oldOwnerId); + // set new Owner for the given Repo + const newOwner = await getUserById(newOwnerId); + await db + .collection(repositoriesCollection) + .findOne({ _id: new ObjectId(repoId) }); + await db + .collection(repositoriesCollection) + .findOneAndUpdate( + { _id: new ObjectId(repoId) }, + { $set: { owner: newOwnerId } } + ); + // remove the new Owner from Workgroup + await removeFromWorkgroup(repoId, newOwner); + + // add old Owner as Member and update Email in Workgroup + const wgMember = { email: oldOwner.email, canEdit: Boolean(true) }; + await db + .collection(WorkgroupsCollection) + .findOneAndUpdate( + { Repo: new ObjectId(repoId) }, + { $set: { owner: newOwner.email }, $push: { Members: wgMember } } + ); + return 'Success'; + } catch (e) { + console.error(`ERROR in updateOwnerInRepo ${e}`); + throw e; + } } async function updateStoriesArrayInRepo(repoId, storiesArray) { - // TODO: vllt in updateStory reinnehmen dann spare ich den DBAufruf - try { - const sortedStoriesArray = storiesArray.map((s) => new ObjectId(s)); - const db = dbConnection.getConnection(); - return await db - .collection(repositoriesCollection) - .findOneAndUpdate( - { _id: new ObjectId(repoId) }, - { $set: { stories: sortedStoriesArray } }, - { returnNewDocument: true } - ); - } catch (e) { - console.log(`ERROR in updateStoriesArrayInRepo${e}`); - throw e; - } + // TODO: vllt in updateStory reinnehmen dann spare ich den DBAufruf + try { + const sortedStoriesArray = storiesArray.map((s) => new ObjectId(s)); + const db = dbConnection.getConnection(); + return await db + .collection(repositoriesCollection) + .findOneAndUpdate( + { _id: new ObjectId(repoId) }, + { $set: { stories: 
sortedStoriesArray } }, + { returnNewDocument: true } + ); + } catch (e) { + console.error(`ERROR in updateStoriesArrayInRepo${e}`); + throw e; + } } async function upsertEntry(storyId, updatedContent) { - try { - const myObjt = { - story_id: storyId, - }; - const db = dbConnection.getConnection(); - const collection = await db.collection(storiesCollection); - let result = await collection.findOneAndUpdate( - myObjt, - { $set: updatedContent }, - { - upsert: false, - } - ); - // TODO remove later when all used stories have the tag storySource - if (!result) { - myObjt.storySource = undefined; - result = await collection.findOneAndUpdate( - myObjt, - { $set: updatedContent }, - { - upsert: true, - } - ); - } - return result; - } catch (e) { - console.log(`ERROR in upsertEntry: ${e}`); - throw e; - } + try { + const myObjt = { + story_id: storyId + }; + const db = dbConnection.getConnection(); + const collection = await db.collection(storiesCollection); + let result = await collection.findOneAndUpdate( + myObjt, + { $set: updatedContent }, + { + upsert: false + } + ); + // TODO remove later when all used stories have the tag storySource + if (!result) { + myObjt.storySource = undefined; + result = await collection.findOneAndUpdate( + myObjt, + { $set: updatedContent }, + { + upsert: true + } + ); + } + return result; + } catch (e) { + console.error(`ERROR in upsertEntry: ${e}`); + throw e; + } } async function getTestReports(storyId) { - let result; - try { - const db = dbConnection.getConnection(); - console.log("Getting Report for storyId :", storyId); - result = await db - .collection(ReportDataCollection) - .find( - { storyId: new ObjectId(storyId) }, - { projection: { jsonReport: 0, reportOptions: 0, json: 0 } } - ) - .toArray(); - console.log("Got ", result.length, " reports for :", storyId); - } catch (e) { - console.log("ERROR in getTestReports", e); - } - return result; + let result; + try { + const db = dbConnection.getConnection(); + console.log('Getting Report for storyId :', storyId); + result = await db + .collection(ReportDataCollection) + .find( + { storyId: new ObjectId(storyId) }, + { projection: { jsonReport: 0, reportOptions: 0, json: 0 } } + ) + .toArray(); + console.log('Got ', result.length, ' reports for :', storyId); + } catch (e) { + console.log('ERROR in getTestReports', e); + } + return result; } async function getGroupTestReports(storyId) { - try { - const db = dbConnection.getConnection(); - console.log("Getting Groups Reports for storyId :", storyId); - // projection value 0 excludes from returning - const query = { - storyStatuses: { $elemMatch: { storyId: new ObjectId(storyId) } }, - }; - const result = await db - .collection(ReportDataCollection) - .find(query, { projection: { jsonReport: 0, reportOptions: 0, json: 0 } }) - .toArray(); - console.log("Got ", result.length, " Group Reports for :", storyId); - return result; - } catch (e) { - console.log("Error in getGroupTestReports: ", e); - return {}; - } + try { + const db = dbConnection.getConnection(); + console.log('Getting Groups Reports for storyId :', storyId); + // projection value 0 excludes from returning + const query = { + storyStatuses: { $elemMatch: { storyId: new ObjectId(storyId) } } + }; + const result = await db + .collection(ReportDataCollection) + .find(query, { projection: { jsonReport: 0, reportOptions: 0, json: 0 } }) + .toArray(); + console.log('Got ', result.length, ' Group Reports for :', storyId); + return result; + } catch (e) { + console.log('Error in getGroupTestReports: ', e); 
+ return {}; + } } async function deleteReport(reportId) { - let result; - let idToDelete; - try { - const db = dbConnection.getConnection(); - const collection = await db.collection(ReportDataCollection); - const reportData = await collection.findOne({ - _id: new ObjectId(reportId), - }); - if (reportData.smallReport) { - idToDelete = reportData.smallReport; - console.log( - "Trying to delete smallReport", - idToDelete, - " in DB for Report", - reportId - ); - await db - .collection(ReportsCollection) - .deleteOne({ _id: new ObjectId(idToDelete) }); - result = await collection.deleteOne({ _id: new ObjectId(reportId) }); - } else { - idToDelete = reportData.bigReport; - console.log( - "trying to delete bigReport", - idToDelete, - " in DB for Report", - reportId - ); - const bucket = await new mongodb.GridFSBucket(db, { - bucketName: "GridFS", - }); - bucket.delete(new ObjectId(idToDelete)); - result = await collection.deleteOne({ _id: new ObjectId(reportId) }); - } - } catch (e) { - console.log("ERROR in deleteReport", e); - } - return result; + let result; + let idToDelete; + try { + const db = dbConnection.getConnection(); + const collection = await db.collection(ReportDataCollection); + const reportData = await collection.findOne({ + _id: new ObjectId(reportId) + }); + if (reportData.smallReport) { + idToDelete = reportData.smallReport; + console.log( + 'Trying to delete smallReport', + idToDelete, + ' in DB for Report', + reportId + ); + await db + .collection(ReportsCollection) + .deleteOne({ _id: new ObjectId(idToDelete) }); + result = await collection.deleteOne({ _id: new ObjectId(reportId) }); + } else { + idToDelete = reportData.bigReport; + console.log( + 'trying to delete bigReport', + idToDelete, + ' in DB for Report', + reportId + ); + const bucket = await new mongodb.GridFSBucket(db, { + bucketName: 'GridFS' + }); + bucket.delete(new ObjectId(idToDelete)); + result = await collection.deleteOne({ _id: new ObjectId(reportId) }); + } + } catch (e) { + console.log('ERROR in deleteReport', e); + } + return result; } async function setIsSavedTestReport(testReportId, isSaved) { - try { - const db = dbConnection.getConnection(); - db.collection(ReportDataCollection).updateOne( - { _id: new ObjectId(testReportId) }, - { - $set: { isSaved }, - } - ); - } catch (e) { - console.log("ERROR in setIsSavedTestReport", e); - } - return "done"; + try { + const db = dbConnection.getConnection(); + db.collection(ReportDataCollection).updateOne( + { _id: new ObjectId(testReportId) }, + { + $set: { isSaved } + } + ); + } catch (e) { + console.log('ERROR in setIsSavedTestReport', e); + } + return 'done'; } async function updateStoryStatus(storyId, storyLastTestStatus) { - try { - const db = dbConnection.getConnection(); - db.collection(storiesCollection).updateOne( - { _id: new ObjectId(storyId) }, - { - $set: { lastTestPassed: storyLastTestStatus }, - } - ); - return "done"; - } catch (e) { - console.log("Error in updateStoryStatus: ", e); - return {}; - } + try { + const db = dbConnection.getConnection(); + db.collection(storiesCollection).updateOne( + { _id: new ObjectId(storyId) }, + { + $set: { lastTestPassed: storyLastTestStatus } + } + ); + return 'done'; + } catch (e) { + console.log('Error in updateStoryStatus: ', e); + return {}; + } } async function updateScenarioStatus( - storyId, - scenarioId, - scenarioLastTestStatus + storyId, + scenarioId, + scenarioLastTestStatus ) { - // TODO: testen - try { - const db = dbConnection.getConnection(); - return await 
db.collection(storiesCollection).updateOne( - { - _id: new ObjectId(storyId), - scenarios: { - $elemMatch: { scenario_id: scenarioId }, - }, - }, - { - $set: { "scenarios.$.lastTestPassed": scenarioLastTestStatus }, - } - ); - } catch (e) { - console.log( - "Error in updateScenarioStatus. Could not set scenario LastTestPassed: ", - e - ); - } + // TODO: testen + try { + const db = dbConnection.getConnection(); + return await db.collection(storiesCollection).updateOne( + { + _id: new ObjectId(storyId), + scenarios: { + $elemMatch: { scenario_id: scenarioId } + } + }, + { + $set: { 'scenarios.$.lastTestPassed': scenarioLastTestStatus } + } + ); + } catch (e) { + console.log( + 'Error in updateScenarioStatus. Could not set scenario LastTestPassed: ', + e + ); + } } async function uploadBigJsonData(data, fileName) { - const db = dbConnection.getConnection(); - const bucket = await new mongodb.GridFSBucket(db, { bucketName: "GridFS" }); - const id = new ObjectId(); - str(JSON.stringify(data)) - .pipe(bucket.openUploadStreamWithId(id, fileName)) - .on("error", async (error) => { - assert.ifError(error); - }) - .on("finish", async () => { - console.log("Done! Uploaded BigReport"); - console.log("ObjectID: of Big Report: ", id); - return id; - }); - return id; + const db = dbConnection.getConnection(); + const bucket = await new mongodb.GridFSBucket(db, { bucketName: 'GridFS' }); + const id = new ObjectId(); + str(JSON.stringify(data)) + .pipe(bucket.openUploadStreamWithId(id, fileName)) + .on('error', async (error) => { + assert.ifError(error); + }) + .on('finish', async () => { + console.log('Done! Uploaded BigReport'); + console.log('ObjectID: of Big Report: ', id); + return id; + }); + return id; } async function uploadReport(reportResults) { - const reportData = reportResults; - const db = dbConnection.getConnection(); - const collection = await db.collection(ReportDataCollection); - fs.readFile( - reportResults.reportOptions.jsonFile, - "utf8", - async (err, data) => { - if (err) console.log(err); - const jReport = { jsonReport: data, created: new Date() }; - const len = Buffer.byteLength(JSON.stringify(data)); - if (len >= 16000000) { - try { - reportData.bigReport = await uploadBigJsonData( - jReport, - reportResults.storyId - ); - console.log( - "ObjectID: of Big Report in UploadReport: ", - reportData.bigReport - ); - collection.insertOne(reportData); - } catch (e) { - console.log("ERROR in uploadReport", e); - } - } else { - try { - db.collection(ReportsCollection).insertOne(jReport); - reportData.smallReport = jReport._id; - collection.insertOne(reportData); - } catch (e) { - console.log("ERROR in uploadReport", e); - } - } - } - ); - return reportResults; + const reportData = reportResults; + const db = dbConnection.getConnection(); + const collection = await db.collection(ReportDataCollection); + fs.readFile( + reportResults.reportOptions.jsonFile, + 'utf8', + async (err, data) => { + if (err) console.log(err); + const jReport = { jsonReport: data, created: new Date() }; + const len = Buffer.byteLength(JSON.stringify(data)); + if (len >= 16000000) { + try { + reportData.bigReport = await uploadBigJsonData( + jReport, + reportResults.storyId + ); + console.log( + 'ObjectID: of Big Report in UploadReport: ', + reportData.bigReport + ); + collection.insertOne(reportData); + } catch (e) { + console.log('ERROR in uploadReport', e); + } + } else { + try { + db.collection(ReportsCollection).insertOne(jReport); + reportData.smallReport = jReport._id; + collection.insertOne(reportData); + } 
catch (e) { + console.log('ERROR in uploadReport', e); + } + } + } + ); + return reportResults; } async function getReportFromDB(report) { - let result; - try { - const db = dbConnection.getConnection(); - if (report.smallReport) { - const reportJson = await db - .collection(ReportsCollection) - .findOne({ _id: report.smallReport }); - result = { - _id: report._id, - jsonReport: reportJson.jsonReport, - }; - } else { - const bucket = await new mongodb.GridFSBucket(db, { - bucketName: "GridFS", - }); - const reportString = await toString( - bucket.openDownloadStream(new ObjectId(report.bigReport.toString())) - ); - const reportJson = JSON.parse(reportString); - result = { - _id: report._id, - jsonReport: reportJson.jsonReport, - }; - } - return result; - } catch (e) { - console.log("ERROR in getReportFromDB", e); - return {}; - } + let result; + try { + const db = dbConnection.getConnection(); + if (report.smallReport) { + const reportJson = await db + .collection(ReportsCollection) + .findOne({ _id: report.smallReport }); + result = { + _id: report._id, + jsonReport: reportJson.jsonReport + }; + } else { + const bucket = await new mongodb.GridFSBucket(db, { + bucketName: 'GridFS' + }); + const reportString = await toString( + bucket.openDownloadStream(new ObjectId(report.bigReport.toString())) + ); + const reportJson = JSON.parse(reportString); + result = { + _id: report._id, + jsonReport: reportJson.jsonReport + }; + } + return result; + } catch (e) { + console.log('ERROR in getReportFromDB', e); + return {}; + } } async function getReportByName(reportName) { - try { - const db = dbConnection.getConnection(); - const report = await db - .collection(ReportDataCollection) - .findOne({ reportName }); - return await getReportFromDB(report); - } catch (e) { - console.log("ERROR in getReportByName", e); - return {}; - } + try { + const db = dbConnection.getConnection(); + const report = await db + .collection(ReportDataCollection) + .findOne({ reportName }); + return await getReportFromDB(report); + } catch (e) { + console.log('ERROR in getReportByName', e); + return {}; + } } async function getReportById(reportId) { - try { - const db = dbConnection.getConnection(); - const report = await db - .collection(ReportDataCollection) - .findOne({ _id: new ObjectId(reportId.toString()) }); - return await getReportFromDB(report); - } catch (e) { - console.log("ERROR in getReportById (DBServices)", e); - return {}; - } + try { + const db = dbConnection.getConnection(); + const report = await db + .collection(ReportDataCollection) + .findOne({ _id: new ObjectId(reportId.toString()) }); + return await getReportFromDB(report); + } catch (e) { + console.log('ERROR in getReportById (DBServices)', e); + return {}; + } } async function getReportDataById(reportId) { - try { - const db = dbConnection.getConnection(); - return await db - .collection(ReportDataCollection) - .findOne({ _id: new ObjectId(reportId.toString()) }); - } catch (e) { - console.log("ERROR in getReportDataById (DBServices)", e); - return {}; - } + try { + const db = dbConnection.getConnection(); + return await db + .collection(ReportDataCollection) + .findOne({ _id: new ObjectId(reportId.toString()) }); + } catch (e) { + console.log('ERROR in getReportDataById (DBServices)', e); + return {}; + } } // delete User in DB needs ID async function deleteUser(userID) { - try { - // delete user from Workgroup - const oId = new ObjectId(userID); - const myObjt = { _id: oId }; - const db = dbConnection.getConnection(); - const repos = await db - 
.collection(repositoriesCollection) - .find({ owner: oId }) - .toArray(); - if (repos) { - for (const repo of repos) - for (const storyID of repo.stories) - await db - .collection(storiesCollection) - .deleteOne({ _id: new ObjectId(storyID) }); // use delete repo? - - const resultRepo = await db - .collection(repositoriesCollection) - .deleteMany({ owner: oId }); - const resultUser = await db.collection(userCollection).deleteOne(myObjt); - return { resultUser, resultRepo }; - } - return null; - } catch (e) { - console.log(`ERROR in deleteUser: ${e}`); - throw e; - } + try { + // delete user from Workgroup + const oId = new ObjectId(userID); + const myObjt = { _id: oId }; + const db = dbConnection.getConnection(); + const repos = await db + .collection(repositoriesCollection) + .find({ owner: oId }) + .toArray(); + if (repos) { + for (const repo of repos) for (const storyID of repo.stories) await db + .collection(storiesCollection) + .deleteOne({ _id: new ObjectId(storyID) }); // use delete repo? + + const resultRepo = await db + .collection(repositoriesCollection) + .deleteMany({ owner: oId }); + const resultUser = await db.collection(userCollection).deleteOne(myObjt); + return { resultUser, resultRepo }; + } + return null; + } catch (e) { + console.error(`ERROR in deleteUser: ${e}`); + throw e; + } } // update a User in DB needs ID and JsonObject User returns altered JsonObject User async function updateUser(userID, updatedUser) { - try { - const oId = new ObjectId(userID); - const myObjt = { _id: oId }; - const db = dbConnection.getConnection(); - const result = await db - .collection(userCollection) - .findOneAndReplace(myObjt, updatedUser); - return result; - } catch (e) { - console.log(`ERROR in updateUser: ${e}`); - throw e; - } + try { + const oId = new ObjectId(userID); + const myObjt = { _id: oId }; + const db = dbConnection.getConnection(); + const result = await db + .collection(userCollection) + .findOneAndReplace(myObjt, updatedUser); + return result; + } catch (e) { + console.error(`ERROR in updateUser: ${e}`); + throw e; + } } // get UserData needs ID returns JsonObject User async function getUserData(userID) { - try { - const oId = new ObjectId(userID); - const myObjt = { _id: oId }; - const db = dbConnection.getConnection(); - return await db.collection(userCollection).findOne(myObjt); - } catch (e) { - console.log(`ERROR FEHLERin getUserData: ${e}`); - throw e; - } + try { + const oId = new ObjectId(userID); + const myObjt = { _id: oId }; + const db = dbConnection.getConnection(); + return await db.collection(userCollection).findOne(myObjt); + } catch (e) { + console.error(`ERROR FEHLERin getUserData: ${e}`); + throw e; + } } async function saveBlock(block, session = undefined, client = undefined) { - try { - block = mongoSanitize(block); - block.repositoryId = new ObjectId(block.repositoryId); - block.owner = new ObjectId(block.owner.toString()); - const db = session - ? client.db('Seed', session) - : dbConnection.getConnection(); - return await db.collection(CustomBlocksCollection).insertOne(block, {session: session || undefined}); - } catch (e) { - console.log(`ERROR in saveBlock: ${e}`); - throw e; - } + try { + block = mongoSanitize(block); + block.repositoryId = new ObjectId(block.repositoryId); + block.owner = new ObjectId(block.owner.toString()); + const db = session + ? 
client.db('Seed', session) + : dbConnection.getConnection(); + return await db.collection(CustomBlocksCollection).insertOne(block, { session: session || undefined }); + } catch (e) { + console.error(`ERROR in saveBlock: ${e}`); + throw e; + } } async function updateBlock( - blockId, - updatedBlock, - session = undefined, - client = undefined + blockId, + updatedBlock, + session = undefined, + client = undefined ) { - try { - updatedBlock._id = new ObjectId(updatedBlock._id); - updatedBlock.repositoryId = new ObjectId(updatedBlock.repositoryId); - updatedBlock.owner = new ObjectId(updatedBlock.owner); - const db = session - ? client.db('Seed', session) - : dbConnection.getConnection(); - updatedBlock._id = new ObjectId(updatedBlock._id); - updatedBlock.repositoryId = new ObjectId(updatedBlock.repositoryId); - updatedBlock.owner = new ObjectId(updatedBlock.owner); - await db - .collection(CustomBlocksCollection) - .findOneAndReplace({ _id: new ObjectId(blockId) }, updatedBlock, {session: session || undefined}); - } catch (e) { - console.log(`ERROR in updateBlock: ${e}`); - throw e; - } + try { + updatedBlock._id = new ObjectId(updatedBlock._id); + updatedBlock.repositoryId = new ObjectId(updatedBlock.repositoryId); + updatedBlock.owner = new ObjectId(updatedBlock.owner); + const db = session + ? client.db('Seed', session) + : dbConnection.getConnection(); + updatedBlock._id = new ObjectId(updatedBlock._id); + updatedBlock.repositoryId = new ObjectId(updatedBlock.repositoryId); + updatedBlock.owner = new ObjectId(updatedBlock.owner); + await db + .collection(CustomBlocksCollection) + .findOneAndReplace({ _id: new ObjectId(blockId) }, updatedBlock, { session: session || undefined }); + } catch (e) { + console.error(`ERROR in updateBlock: ${e}`); + throw e; + } } // get one Block by Id async function getBlock(blockId) { - try { - const db = dbConnection.getConnection(); - return await db - .collection(CustomBlocksCollection) - .findOne({ _id: new ObjectId(blockId) }); - } catch (e) { - console.log(`ERROR in getBlock: ${e}`); - throw e; - } + try { + const db = dbConnection.getConnection(); + return await db + .collection(CustomBlocksCollection) + .findOne({ _id: new ObjectId(blockId) }); + } catch (e) { + console.error(`ERROR in getBlock: ${e}`); + throw e; + } } // get all Blocks by Id returns Array with all existing CustomBlocks async function getBlocks(repoId) { - try { - const db = dbConnection.getConnection(); - return await db - .collection(CustomBlocksCollection) - .find({ repositoryId: new ObjectId(repoId) }) - .toArray(); - } catch (e) { - console.log(`ERROR in getBlocks: ${e}`); - throw e; - } + try { + const db = dbConnection.getConnection(); + return await db + .collection(CustomBlocksCollection) + .find({ repositoryId: new ObjectId(repoId) }) + .toArray(); + } catch (e) { + console.error(`ERROR in getBlocks: ${e}`); + throw e; + } } // deletes the CustomBlock with the given Name, need the name async function deleteBlock(blockId, userId) { - try { - const myObjt = { - _id: new ObjectId(blockId), - owner: new ObjectId(userId), - }; - const db = dbConnection.getConnection(); - await db.collection(CustomBlocksCollection).deleteOne(myObjt); - return "done"; - } catch (e) { - console.log(`ERROR in deleteBlock: ${e}`); - throw e; - } + try { + const myObjt = { + _id: new ObjectId(blockId), + owner: new ObjectId(userId) + }; + const db = dbConnection.getConnection(); + await db.collection(CustomBlocksCollection).deleteOne(myObjt); + return 'done'; + } catch (e) { + console.error(`ERROR in 
deleteBlock: ${e}`); + throw e; + } } async function getWorkgroup(id) { - try { - const db = dbConnection.getConnection(); - return await db - .collection(WorkgroupsCollection) - .findOne({ Repo: new ObjectId(id) }); - } catch (e) { - console.log(`ERROR in getWorkgroup: ${e}`); - throw e; - } + try { + const db = dbConnection.getConnection(); + return await db + .collection(WorkgroupsCollection) + .findOne({ Repo: new ObjectId(id) }); + } catch (e) { + console.error(`ERROR in getWorkgroup: ${e}`); + throw e; + } } /** @@ -1565,265 +1564,261 @@ async function getWorkgroup(id) { * @returns */ async function addMember(repoId, user) { - try { - const db = dbConnection.getConnection(); - const wGCollection = await db.collection(WorkgroupsCollection); - const check = await wGCollection.findOne({ - Repo: new ObjectId(repoId), - Members: { $elemMatch: { email: user.email } }, - }); - if (check) return "Dieser User ist bereits in der Workgroup"; - const repo = await db - .collection(repositoriesCollection) - .findOne({ _id: new ObjectId(repoId) }); - const owner = await db - .collection(userCollection) - .findOne({ _id: repo.owner }); - const workGroup = await wGCollection.findOne({ - Repo: new ObjectId(repoId), - }); - if (!workGroup) { - await wGCollection.insertOne({ - name: repo.repoName, - owner: owner.email, - Repo: new ObjectId(repoId), - Members: [{ email: user.email, canEdit: Boolean(user.canEdit) }], - }); - } else { - await wGCollection.findOneAndUpdate( - { Repo: new ObjectId(repoId) }, - { $push: { Members: user } } - ); - } - const result = { owner: {}, member: [] }; - const wG = await wGCollection.findOne({ Repo: new ObjectId(repoId) }); - result.owner = { email: owner.email, canEdit: true }; - result.member = wG.Members; - return result; - } catch (e) { - console.log(`ERROR in addMember: ${e}`); - throw e; - } + try { + const db = dbConnection.getConnection(); + const wGCollection = await db.collection(WorkgroupsCollection); + const check = await wGCollection.findOne({ + Repo: new ObjectId(repoId), + Members: { $elemMatch: { email: user.email } } + }); + if (check) return 'Dieser User ist bereits in der Workgroup'; + const repo = await db + .collection(repositoriesCollection) + .findOne({ _id: new ObjectId(repoId) }); + const owner = await db + .collection(userCollection) + .findOne({ _id: repo.owner }); + const workGroup = await wGCollection.findOne({ + Repo: new ObjectId(repoId) + }); + if (!workGroup) { + await wGCollection.insertOne({ + name: repo.repoName, + owner: owner.email, + Repo: new ObjectId(repoId), + Members: [{ email: user.email, canEdit: Boolean(user.canEdit) }] + }); + } else { + await wGCollection.findOneAndUpdate( + { Repo: new ObjectId(repoId) }, + { $push: { Members: user } } + ); + } + const result = { owner: {}, member: [] }; + const wG = await wGCollection.findOne({ Repo: new ObjectId(repoId) }); + result.owner = { email: owner.email, canEdit: true }; + result.member = wG.Members; + return result; + } catch (e) { + console.error(`ERROR in addMember: ${e}`); + throw e; + } } async function updateMemberStatus(repoId, user) { - try { - const db = dbConnection.getConnection(); - const wGCollection = await db.collection(WorkgroupsCollection); - const repo = await db - .collection(repositoriesCollection) - .findOne({ _id: new ObjectId(repoId) }); - const usersCollection = await db.collection(userCollection); - const owner = await usersCollection.findOne({ _id: repo.owner }); - const updatedWG = await wGCollection.findOneAndUpdate( - { Repo: new ObjectId(repoId) }, 
- { $set: { "Members.$[elem].canEdit": Boolean(user.canEdit) } }, - { arrayFilters: [{ "elem.email": user.email }] } - ); - if (updatedWG) { - const wG = await wGCollection.findOne({ Repo: new ObjectId(repoId) }); - const result = { owner: {}, member: [] }; - result.owner = { email: owner.email, canEdit: true }; - result.member = wG.Members; - return result; - } - } catch (e) { - console.log(`ERROR in updateMemberStatus: ${e}`); - throw e; - } + try { + const db = dbConnection.getConnection(); + const wGCollection = await db.collection(WorkgroupsCollection); + const repo = await db + .collection(repositoriesCollection) + .findOne({ _id: new ObjectId(repoId) }); + const usersCollection = await db.collection(userCollection); + const owner = await usersCollection.findOne({ _id: repo.owner }); + const updatedWG = await wGCollection.findOneAndUpdate( + { Repo: new ObjectId(repoId) }, + { $set: { 'Members.$[elem].canEdit': Boolean(user.canEdit) } }, + { arrayFilters: [{ 'elem.email': user.email }] } + ); + if (updatedWG) { + const wG = await wGCollection.findOne({ Repo: new ObjectId(repoId) }); + const result = { owner: {}, member: [] }; + result.owner = { email: owner.email, canEdit: true }; + result.member = wG.Members; + return result; + } + } catch (e) { + console.error(`ERROR in updateMemberStatus: ${e}`); + throw e; + } } async function getMembers(id) { - try { - const db = dbConnection.getConnection(); - const repo = await db - .collection(repositoriesCollection) - .findOne({ _id: new ObjectId(id) }); - const owner = await db - .collection(userCollection) - .findOne({ _id: repo.owner }); - const wG = await db - .collection(WorkgroupsCollection) - .findOne({ Repo: new ObjectId(id) }); - if (!wG) - return { owner: { email: owner.email, canEdit: true }, member: [] }; - const result = { owner: {}, member: [] }; - result.owner = { email: owner.email, canEdit: true }; - result.member = wG.Members; - return result; - } catch (e) { - console.log(`ERROR in getMembers: ${e}`); - throw e; - } + try { + const db = dbConnection.getConnection(); + const repo = await db + .collection(repositoriesCollection) + .findOne({ _id: new ObjectId(id) }); + const owner = await db + .collection(userCollection) + .findOne({ _id: repo.owner }); + const wG = await db + .collection(WorkgroupsCollection) + .findOne({ Repo: new ObjectId(id) }); + if (!wG) return { owner: { email: owner.email, canEdit: true }, member: [] }; + const result = { owner: {}, member: [] }; + result.owner = { email: owner.email, canEdit: true }; + result.member = wG.Members; + return result; + } catch (e) { + console.error(`ERROR in getMembers: ${e}`); + throw e; + } } async function updateOneDriver(id, driver) { - try { - const oneDriver = !driver.oneDriver; - const db = dbConnection.getConnection(); - const result = await db - .collection(storiesCollection) - .findOneAndUpdate({ _id: new ObjectId(id) }, { $set: { oneDriver } }); - return result; - } catch (e) { - console.log("ERROR in updateOneDriver: ", e); - } -} - -//Function to add stories for the import functionality + try { + const oneDriver = !driver.oneDriver; + const db = dbConnection.getConnection(); + const result = await db + .collection(storiesCollection) + .findOneAndUpdate({ _id: new ObjectId(id) }, { $set: { oneDriver } }); + return result; + } catch (e) { + console.log('ERROR in updateOneDriver: ', e); + } +} + +// Function to add stories for the import functionality async function importStories( - post, - importRepo, - session, - storyFiles, - groupMapping, - 
existingNameList, - importMode, - checkAndAddSuffix, - findAssociatedID, - client, - file, - allConflicts = [] + post, + importRepo, + session, + storyFiles, + groupMapping, + existingNameList, + importMode, + checkAndAddSuffix, + findAssociatedID, + client, + file, + allConflicts = [] ) { - const zip = new AdmZip(file.buffer); - // Function to insert newID based on oldID - function insertNewId(oldID, newID) { - for (const mapping of groupMapping) { - if (mapping.oldID === oldID) { - mapping.newID = newID; - return; // Exit the loop after successful insertion - } - } - console.error( - `Mapping with oldID: ${oldID} not found for newID insertion.` - ); - } - - // Iterate through each story file - for (const storyFile of storyFiles) { - const storyData = zip.readAsText(storyFile.entryName); - const storyObject = JSON.parse(storyData); - let name = storyObject.title; - if (!post && importMode) { - name = checkAndAddSuffix(storyObject.title, existingNameList); - storyObject.title = name; - } - - if (post || importMode) { - const newStory = await createStory( - name, - storyObject.body === null ? undefined : storyObject.body, - importRepo, - session, - client - ); - //Insert new ID corresponding to old one - insertNewId( - groupMapping[storyFiles.indexOf(storyFile)].oldID, - newStory.toHexString() - ); - //Get newly created StoryID and paste it into "old" story to replace newly generated one with same id - storyObject._id = newStory; - await updateStory(storyObject, session, client); - await insertStoryIdIntoRepo( - newStory, - importRepo, - session, - client - ); - } - //Apply same logic for override - if (!post && !importMode && findAssociatedID(name, allConflicts)) { - storyObject._id = findAssociatedID(name, allConflicts); - await updateStory(storyObject, session, client); - } - } -} - -//RepoBlocks creation and assignment function for the import functionality + const zip = new AdmZip(file.buffer); + // Function to insert newID based on oldID + function insertNewId(oldID, newID) { + for (const mapping of groupMapping) { + if (mapping.oldID === oldID) { + mapping.newID = newID; + return; // Exit the loop after successful insertion + } + } + console.error( + `Mapping with oldID: ${oldID} not found for newID insertion.` + ); + } + + // Iterate through each story file + for (const storyFile of storyFiles) { + const storyData = zip.readAsText(storyFile.entryName); + const storyObject = JSON.parse(storyData); + let name = storyObject.title; + if (!post && importMode) { + name = checkAndAddSuffix(storyObject.title, existingNameList); + storyObject.title = name; + } + + if (post || importMode) { + const newStory = await createStory( + name, + storyObject.body === null ? 
undefined : storyObject.body, + importRepo, + session, + client + ); + // Insert new ID corresponding to old one + insertNewId( + groupMapping[storyFiles.indexOf(storyFile)].oldID, + newStory.toHexString() + ); + // Get newly created StoryID and paste it into "old" story to replace newly generated one with same id + storyObject._id = newStory; + await updateStory(storyObject, session, client); + await insertStoryIdIntoRepo( + newStory, + importRepo, + session, + client + ); + } + // Apply same logic for override + if (!post && !importMode && findAssociatedID(name, allConflicts)) { + storyObject._id = findAssociatedID(name, allConflicts); + await updateStory(storyObject, session, client); + } + } +} + +// RepoBlocks creation and assignment function for the import functionality async function importBlocks( - post, - importRepo, - repoName, - session, - existingNameList, - repoBlocksData, - importMode, - checkAndAddSuffix, - findAssociatedID, - client, - allConflicts = [] + post, + importRepo, + repoName, + session, + existingNameList, + repoBlocksData, + importMode, + checkAndAddSuffix, + findAssociatedID, + client, + allConflicts = [] ) { - for (const singularBlock of repoBlocksData) { - if (!post && importMode) - singularBlock.name = checkAndAddSuffix( - singularBlock.name, - existingNameList - ); - singularBlock.repository = repoName; - singularBlock.repositoryId = importRepo; - if (!post && !importMode) { - singularBlock._id = findAssociatedID(singularBlock.name, allConflicts); - if(!singularBlock._id) - console.log("Error within findAssociatedID @ importBlocks"); - await updateBlock(singularBlock._id, singularBlock, session, client); - } else { - await saveBlock(singularBlock, session, client); - } - } -} - -//Group creation and assignment function for the import functionality + for (const singularBlock of repoBlocksData) { + if (!post && importMode) singularBlock.name = checkAndAddSuffix( + singularBlock.name, + existingNameList + ); + singularBlock.repository = repoName; + singularBlock.repositoryId = importRepo; + if (!post && !importMode) { + singularBlock._id = findAssociatedID(singularBlock.name, allConflicts); + if (!singularBlock._id) console.error('Error within findAssociatedID @ importBlocks'); + await updateBlock(singularBlock._id, singularBlock, session, client); + } else { + await saveBlock(singularBlock, session, client); + } + } +} + +// Group creation and assignment function for the import functionality // TODO: Ggf. 
Export anpassen -> Ersatz von StoryIds durch indices vermutlich unnötig async function importGroups( - post, - importRepo, - session, - groupFiles, - groupMapping, - existingNameList, - importMode, - repo_id, - checkAndAddSuffix, - findAssociatedID, - client, - file, - allConflicts = [] + post, + importRepo, + session, + groupFiles, + groupMapping, + existingNameList, + importMode, + repo_id, + checkAndAddSuffix, + findAssociatedID, + client, + file, + allConflicts = [] ) { - const zip = new AdmZip(file.buffer); - for (const groupFile of groupFiles) { - const groupData = zip.readAsText(groupFile.entryName); - const groupObject = JSON.parse(groupData); - for (let i = 0; i < groupObject.member_stories.length; i++) { - groupObject.member_stories[i] = - groupMapping[groupObject.member_stories[i]].newID; - } - let name = groupObject.name; - if (!post && importMode) name = checkAndAddSuffix(name, existingNameList); - if (!post && !importMode && findAssociatedID(name, allConflicts)) { - groupObject._id = findAssociatedID(name, allConflicts); - let oldGroup = await getOneStoryGroup(repo_id, groupObject._id); - groupObject.member_stories = oldGroup.member_stories; - const updatedGroup = await updateStoryGroup( - repo_id, - groupObject._id, - groupObject, - session, - client - ); - console.log("Group " + name + " has been updated. ID: " + updatedGroup._id); - } else { - await createStoryGroup( - importRepo, - name, - groupObject.member_stories, - groupObject.isSequential, - session, - client - ); - console.log("Group " + name + " inserted into: " + importRepo); - } - } + const zip = new AdmZip(file.buffer); + for (const groupFile of groupFiles) { + const groupData = zip.readAsText(groupFile.entryName); + const groupObject = JSON.parse(groupData); + for (let i = 0; i < groupObject.member_stories.length; i++) { + groupObject.member_stories[i] = groupMapping[groupObject.member_stories[i]].newID; + } + let { name } = groupObject; + if (!post && importMode) name = checkAndAddSuffix(name, existingNameList); + if (!post && !importMode && findAssociatedID(name, allConflicts)) { + groupObject._id = findAssociatedID(name, allConflicts); + const oldGroup = await getOneStoryGroup(repo_id, groupObject._id); + groupObject.member_stories = oldGroup.member_stories; + const updatedGroup = await updateStoryGroup( + repo_id, + groupObject._id, + groupObject, + session, + client + ); + console.log(`Group ${name} has been updated. 
ID: ${updatedGroup._id}`); + } else { + await createStoryGroup( + importRepo, + name, + groupObject.member_stories, + groupObject.isSequential, + session, + client + ); + console.log(`Group ${name} inserted into: ${importRepo}`); + } + } } async function fileUpload(filename, repoId, file) { @@ -1832,19 +1827,22 @@ async function fileUpload(filename, repoId, file) { const bucket = new mongodb.GridFSBucket(db, { bucketName: 'GridFS' }); const repoObjId = new ObjectId(repoId); const id = new ObjectId(); - //base filename may be the same as filename, excluding extension + // base filename may be the same as filename, excluding extension const baseFilename = filename.replace(/\s?(\(\d+\))?\.\w+$/, ''); // the regex searches for files and any (0-9) // eslint-disable-next-line no-useless-escape - const existingFiles = await db.collection('GridFS.files').find({ filename: { $regex: `^${baseFilename}` }, metadata: { repoId: repoObjId } }, { filename: 1 }).toArray(); + const existingFiles = await db.collection('GridFS.files').find({ filename: { $regex: `^${baseFilename}` }, metadata: { repoId: repoObjId } }, { filename: 1 }) + .toArray(); const existingFilenames = existingFiles.map((file) => file.filename); const newFilename = generateUniqueFilename(existingFilenames, baseFilename, filename); return new Promise((resolve, reject) => { - str(file,'base64') + str(file, 'base64') .pipe(bucket.openUploadStreamWithId(id, newFilename, { metadata: { repoId: repoObjId } })) .on('error', async (error) => reject(error)) - .on('finish', async () => resolve({_id: id, filename: newFilename, uploadDate: new Date(Date.now()).toISOString(), metadata: { repoId: repoObjId } })); + .on('finish', async () => resolve({ + _id: id, filename: newFilename, uploadDate: new Date(Date.now()).toISOString(), metadata: { repoId: repoObjId } + })); }); } catch (e) { console.log('ERROR in file upload: ', e); @@ -1856,7 +1854,7 @@ function generateUniqueFilename(existingFilenames, baseFilename, filename) { let newFilename = filename; let count = 2; while (existingFilenames.includes(newFilename)) { - newFilename = baseFilename + ' (' + count++ + ').' 
+ filename.split('.').pop(); + newFilename = `${baseFilename} (${count++}).${filename.split('.').pop()}`; } return newFilename; } @@ -1887,7 +1885,7 @@ async function getFileList(repoId) { /** * Store Files Temporarily in the File System - * @param {string} fileIds + * @param {string} fileIds */ async function getFiles(fileTitles, repoId) { const db = dbConnection.getConnection(); @@ -1904,7 +1902,7 @@ async function getFiles(fileTitles, repoId) { default: destinationDirectory = '/home/public/SeedTmp/'; } - console.log('destination: ', destinationDirectory) + console.log('destination: ', destinationDirectory); if (!fs.existsSync(destinationDirectory)) { fs.mkdirSync(destinationDirectory, { recursive: true }); } @@ -1916,14 +1914,14 @@ async function getFiles(fileTitles, repoId) { const downloadStream = bucket.openDownloadStream(fileInfo[0]._id); const destinationPath = destinationDirectory + fileInfo[0].filename; const fileWriteStream = fs.createWriteStream(destinationPath); - + setTimeout(() => { fs.unlink(destinationPath, (err) => { if (err) console.log(err); else console.log(`${fileInfo[0].filename} deleted.`); }); }, 18000000); // 5h Timeout - + await new Promise((resolve, reject) => { downloadStream.pipe(fileWriteStream); downloadStream.on('error', reject); @@ -1932,7 +1930,7 @@ async function getFiles(fileTitles, repoId) { }).catch((e) => { console.error(e); }); - + console.log('Datei erfolgreich heruntergeladen:', destinationPath); } catch (error) { console.error('Datei nicht gefunden:', error.message); @@ -1953,6 +1951,51 @@ function mongoSanitize(v) { // from https://github.com/vkarpov15/mongo-sanitize return v; } +async function getStoriesByIssueKeys(issueKeys) { + try { + const db = dbConnection.getConnection(); + const stories = await db.collection(storiesCollection).find({ + issue_number: { $in: issueKeys } + }) + .project({ _id: 1 }) + .toArray(); + + if (!stories.length) { + console.log('No stories found for the provided issue keys.'); + return []; + } + + const storyIds = stories.map((story) => story._id.toString()); + return storyIds; + } catch (error) { + console.error('Error retrieving stories by issue keys:', error); + throw error; + } +} + +/** + * Get single story by issue key + */ + +async function getOneStoryByIssueKey(issueKey) { + try { + const db = dbConnection.getConnection(); + const story = await db.collection(storiesCollection).findOne({ + issue_number: issueKey + }); + + if (!story) { + console.log('No story found for the provided issue key.'); + return null; + } + + return story; + } catch (error) { + console.error('Error retrieving story by issue key:', error); + throw error; + } +} + module.exports = { getFileList, getFiles, @@ -2033,5 +2076,7 @@ module.exports = { getRepoSettingsById, importStories, importBlocks, - importGroups + importGroups, + getStoriesByIssueKeys, + getOneStoryByIssueKey }; diff --git a/backend/src/helpers/projectManagement.ts b/backend/src/helpers/projectManagement.ts index 3e440bb56..f93208bef 100644 --- a/backend/src/helpers/projectManagement.ts +++ b/backend/src/helpers/projectManagement.ts @@ -8,16 +8,17 @@ import AdmZip from "adm-zip"; import path from "path"; enum Sources { - GITHUB = "github", - JIRA = "jira", - DB = "db", + GITHUB = "github", + JIRA = "jira", + DB = "db" } class Group { - _id: string; - name: string; - member_stories: Array; - isSequential: boolean; + _id: string + name: string + member_stories: Array + isSequential: boolean + xrayTestSet: boolean } class Repository { @@ -211,19 +212,120 @@ function 
starredRepositories(ownerId, githubId, githubName, token) { ); } -async function fuseStoryWithDb(story) { - const result = await mongo.getOneStory(parseInt(story.story_id, 10)); - if (result !== null) { - story.scenarios = result.scenarios; - story.background = result.background; - story.lastTestPassed = result.lastTestPassed; +function mergeTestRunSteps(dbTestRunSteps, jiraTestRunSteps) { + if (dbTestRunSteps !== jiraTestRunSteps) { + return jiraTestRunSteps; } + else { + return dbTestRunSteps; + } +} + +function mergeStepDefinitions(dbStepDefinitions, jiraStepDefinitions) { + const mergedStepDefinitions = {}; + ['given', 'when', 'then', 'example'].forEach(stepType => { + const dbSteps = dbStepDefinitions[stepType] || []; + const jiraSteps = jiraStepDefinitions[stepType] || []; + + const allJiraInDb = jiraSteps.every(jiraStep => + dbSteps.some(dbStep => dbStep.id === jiraStep.id) + ); + + if (allJiraInDb) { + mergedStepDefinitions[stepType] = dbSteps.map(dbStep => { + const jiraStep = jiraSteps.find(jStep => jStep.id === dbStep.id); + return jiraStep ? {...dbStep, ...jiraStep} : dbStep; + }); + } else { + mergedStepDefinitions[stepType] = [ + ...jiraSteps, + ...dbSteps.filter(dbStep => !jiraSteps.some(jStep => jStep.id === dbStep.id)) + ]; + } + }); + + return mergedStepDefinitions; +} + + +function mergeStories(dbStory, jiraStory) { + const mergedStory = { ...dbStory }; + const dbScenarios = dbStory.scenarios; + const jiraScenarios = jiraStory.scenarios; + + const dbScenarioMap = new Map(); + dbScenarios.forEach(scenario => dbScenarioMap.set(scenario.scenario_id, scenario)); + + const jiraScenarioMap = new Map(); + jiraScenarios.forEach(scenario => jiraScenarioMap.set(scenario.scenario_id, scenario)); + + // inidicates if all xray jira scenarios are in db + const allJiraInDb = jiraScenarios.every(jiraScenario => dbScenarioMap.has(jiraScenario.scenario_id)); + + const mergedScenarios = []; + + // if all jira scenarios are in db, we keep db order and merge jira scenarios + if (allJiraInDb) { + + dbScenarios.forEach(dbScenario => { + const jiraScenario = jiraScenarioMap.get(dbScenario.scenario_id); + if (jiraScenario) { + mergedScenarios.push({ + ...dbScenario, + name: jiraScenario.name, + stepDefinitions: mergeStepDefinitions(dbScenario.stepDefinitions, jiraScenario.stepDefinitions), + testRunSteps: mergeTestRunSteps(dbScenario.testRunSteps, jiraScenario.testRunSteps), + testKey: jiraScenario.testKey + }); + } else { + mergedScenarios.push(dbScenario); + } + }); + // if not all jira scenarios are in db, we keep jira order first and add db scenarios } else { - story.scenarios = [emptyScenario()]; - story.background = emptyBackground(); + jiraScenarios.forEach(jiraScenario => { + const dbScenario = dbScenarioMap.get(jiraScenario.scenario_id); + if (dbScenario) { + mergedScenarios.push({ + ...dbScenario, + name: jiraScenario.name, + stepDefinitions: mergeStepDefinitions(dbScenario.stepDefinitions, jiraScenario.stepDefinitions), + testRunSteps: mergeTestRunSteps(dbScenario.testRunSteps, jiraScenario.testRunSteps), + testKey: jiraScenario.testKey + }); + } else { + mergedScenarios.push(jiraScenario); + } + }); + + // add remaining db scenarios that were not processed + dbScenarios.forEach(dbScenario => { + if (!jiraScenarioMap.has(dbScenario.scenario_id)) { + mergedScenarios.push(dbScenario); + } + }); } - story.story_id = parseInt(story.story_id, 10); - if (story.storySource !== "jira") - story.issue_number = parseInt(story.issue_number, 10); + + mergedStory.scenarios = mergedScenarios; + + 
return mergedStory; +} + + +async function fuseStoryWithDb(story) { + const result = await mongo.getOneStory(parseInt(story.story_id, 10)); + + if (result !== null) { + + const mergedStory = mergeStories(result, story); + story.scenarios = mergedStory.scenarios; + story.background = result.background; + story.lastTestPassed = result.lastTestPassed; + } else { + story.scenarios = [emptyScenario()]; + story.background = emptyBackground(); + } + story.story_id = parseInt(story.story_id, 10); + if (story.storySource !== 'jira') story.issue_number = parseInt(story.issue_number, 10); const finalStory = await mongo.upsertEntry(story.story_id, story); story._id = finalStory._id; @@ -326,7 +428,6 @@ function checkAndAddSuffix(name, conflictingNameList) { // Add the new name (with or without suffix) to the list conflictingNameList.push(newName); - console.log(conflictingNameList); return newName; } @@ -371,10 +472,8 @@ async function importProject(file, repo_id?, projectName?, importMode?) { }); const repoJsonData = zip.readAsText("repo.json"); const repoData = JSON.parse(repoJsonData); - console.log(repoData); const mappingJsonData = zip.readAsText("keyStoryIds.json"); const mappingData = JSON.parse(mappingJsonData); - console.log(mappingData); const repoBlocksJsonData = zip.readAsText("repoBlocks.json"); const repoBlocksData = JSON.parse(repoBlocksJsonData); let groupMapping = []; @@ -399,7 +498,7 @@ async function importProject(file, repo_id?, projectName?, importMode?) { existingName: title, associatedID: _id.toHexString(), })); - console.log(existingNames); + const newData = existingNames.map(({ existingName }) => existingName); existingNameList = existingNameList.concat(newData); for (const storyFile of storyFiles) { @@ -425,7 +524,7 @@ async function importProject(file, repo_id?, projectName?, importMode?) { existingName: name, associatedID: _id.toHexString(), })); - console.log(existingNames); + const newData = existingNames.map(({ existingName }) => existingName); existingNameList = existingNameList.concat(newData); for (const singularBlock of repoBlocksData) { @@ -449,7 +548,7 @@ async function importProject(file, repo_id?, projectName?, importMode?) { existingName: name, associatedID: _id.toHexString(), })); - console.log(existingNames); + const newData = existingNames.map(({ existingName }) => existingName); existingNameList = existingNameList.concat(newData); for (const groupFile of groupFiles) { @@ -575,9 +674,7 @@ async function importProject(file, repo_id?, projectName?, importMode?) { client, file ); - console.log(groupMapping); - console.log(existingNameList); - + await mongo.importBlocks( true, newRepo.toHexString(), @@ -620,6 +717,53 @@ async function importProject(file, repo_id?, projectName?, importMode?) { await client.close(); } } +async function updateTestSets(testSets, repo_id) { + for (const testSet of testSets) { + try { + const storyIds = await getStorysByIssue(testSet.tests); + + if (storyIds.length === 0) { + console.log(`No stories found for Test Set ${testSet.testSetKey}. 
Skipping group creation.`); + continue; + } + + // Get repository to update groups + const repository = await mongo.getOneRepositoryById(repo_id); + + // Find existing group by testSetKey + let existingGroup = repository.groups.find(group => group.name === testSet.testSetKey); + + if (existingGroup) { + // Update existing group + const updatedGroup = { ...existingGroup, member_stories: storyIds }; + await mongo.updateStoryGroup(repo_id, existingGroup._id.toString(), updatedGroup); + console.log(`Updated group for Test Set: ${testSet.testSetKey}`); + } else { + // Create a new group if it does not exist + const groupId = await mongo.createStoryGroup( + repo_id, + testSet.testSetKey, + storyIds, + true, + testSet.xrayTestSet + ); + console.log(`Group created for Test Set: ${testSet.testSetKey}`); + } + } catch (e) { + console.error(`Error processing group for Test Set: ${testSet.testSetKey}:`, e); + } + } +} + +async function getStorysByIssue(issueKeys) { + try { + const storiesIds = await mongo.getStoriesByIssueKeys(issueKeys); + return storiesIds + } catch (error) { + console.error("Error fetching stories by issue keys:", error); + return []; + } +} module.exports = { getJiraRepos, @@ -632,4 +776,5 @@ module.exports = { importProject, checkAndAddSuffix, findAssociatedID, + updateTestSets }; diff --git a/backend/src/helpers/reporting.ts b/backend/src/helpers/reporting.ts index 15dec7b72..6e5faddd3 100644 --- a/backend/src/helpers/reporting.ts +++ b/backend/src/helpers/reporting.ts @@ -151,7 +151,6 @@ function analyzeScenarioReport(stories: Array, reportName: string, scenario console.log(`NUMBER OF SCENARIOS IN THE REPORT (must be 1): ${storyReport.elements.length}`); const story = stories[0]; console.log(`Story ID: ${story._id}`); - console.log(story); reportResults.storyId = story._id; const scenarioReport = storyReport.elements[0] @@ -203,10 +202,11 @@ function analyzeGroupReport(grpName: string, stories: any[], reportOptions: any) reportResults.storyStatuses.push(result); } // end of for each story - reportResults.status = testPassed(overallPassedSteps, overallFailedSteps); + reportResults.status = testPassed(overallFailedSteps, overallPassedSteps); reportResults.groupTestResults = { passedSteps: overallPassedSteps, failedSteps: overallFailedSteps, skippedSteps: overallSkippedSteps }; reportResults.scenariosTested = scenariosTested; reportResults.reportName = grpName; + reportResults.storiesTested = stories; return reportResults } catch (error) { reportResults.status = false; @@ -254,7 +254,7 @@ async function createReport(res, reportName: string) {//TODO remove res here pus try { fs.writeFileSync(resolvedPath, report.jsonReport); } catch (error) { - console.log('Error:', error); + console.error('Error:', error); } reporter.generate(setOptions(reportName)); @@ -344,7 +344,7 @@ function scenarioResult(scenarioReport: any, scenario: any) { function deleteReport(jsonReport: string) { const report = path.normalize(`${reportPath}${jsonReport}`); fs.unlink(report, (err) => { - if (err) console.log(err); + if (err) console.error(err); else console.log(`${report} deleted.`); }); } @@ -355,7 +355,6 @@ async function fetchFiles(stories, repoId){ .flatMap(scen => scen.stepDefinitions.when) .filter(step => step.type === "Upload File") .map(step => step.values[0]); - console.log(neededFiles) if (neededFiles) return mongo.getFiles(neededFiles, repoId) } @@ -365,10 +364,10 @@ async function runReport(req, res, stories: any[], mode: ExecutionMode, paramete try { if (mode === ExecutionMode.GROUP) { await 
fetchFiles(stories, parameters.repositoryId) - req.body.name = req.body.name.replace(/ /g, '_') + Date.now(); + req.body.name = req.body.name.replace(/[ <>&]/g, '_') + Date.now(); fs.mkdirSync(`./features/${req.body.name}`); - if (parameters.isSequential == undefined || !parameters.isSequential) - reportObj = await Promise.all(stories.map((story) => testExecutor.executeTest(req, mode, story))).then((valueArr)=>valueArr.pop()); + if (parameters.isSequential == undefined || !parameters.isSequential){ + reportObj = await Promise.all(stories.map((story) => testExecutor.executeTest(req, mode, story))).then((valueArr)=>valueArr.pop());} else { for (const story of stories) { reportObj = await testExecutor.executeTest(req, mode, story); @@ -380,6 +379,7 @@ async function runReport(req, res, stories: any[], mode: ExecutionMode, paramete reportObj = await testExecutor.executeTest(req, mode, story).catch((reason) =>{console.log('crashed in execute test');res.send(reason).status(500)}); } } catch (error) { + console.error(error) res.status(404).send(error); return; } diff --git a/backend/src/helpers/xray.ts b/backend/src/helpers/xray.ts new file mode 100644 index 000000000..42dbad28c --- /dev/null +++ b/backend/src/helpers/xray.ts @@ -0,0 +1,293 @@ +const stepDefs = require('../../src/database/stepTypes'); + +/** + * Fetches all necessary data for a given test issue, including test runs and test steps. + * + * @param {Object} issue - The test issue object containing the key. + * @param {Object} options - The options object for making the fetch requests (e.g., headers). + * @param {string} Host - The hostname or base URL for the API requests. + * @returns {Promise} An object containing scenarioList and testStepDescription. + */ +async function handleTestIssue(issue, options, Host) { + // Fetch all test runs for the given issue + const testrunResponse = await fetch(`https://${Host}/rest/raven/2.0/api/test/${issue.key}/testruns`, options); + const testRuns = await testrunResponse.json(); + + // Fetch details for all test runs + const testRunDetailsPromises = testRuns.map((testRun) => fetch(`https://${Host}/rest/raven/2.0/api/testrun/${testRun.id}`, options).then(response => response.json())); + const resolvedTestRuns = await Promise.all(testRunDetailsPromises); + + // Fetch all test steps defined for the given issue + const testStepsResponse = await fetch(`https://${Host}/rest/raven/2.0/api/test/${issue.key}/steps`, options); + const testSteps = await testStepsResponse.json(); + + // Process the test steps with corresponding testrun and scenario details + const { scenarioList, testStepDescription } = processTestSteps(testSteps.steps, resolvedTestRuns, issue.key); + + return { scenarioList, testStepDescription }; +} + +/** + * Creates scenarios and description given xray test steps and resolved xray test runs. + * + * @param {Array} steps - An array of test steps for the given issue. + * @param {Array} resolvedTestRuns - An array of resolved test runs containing details of each run. + * @param {string} issueKey - The key of the issue being processed. + * @returns {Object} An object containing the scenarioList and testStepDescription. 
+ */ +function processTestSteps(steps, resolvedTestRuns, issueKey) { + const scenarioList = []; + + let testStepDescription = '\n\nTest-Steps:\n'; + + // Iterate through steps and add step description + steps.forEach((step) => { + if (!step.fields) { + console.log(`Fields missing for step ${step.id}`); + return; + } + + const { fields } = step; + + // Check if xray step is identical to one step from the step definitions + const identicalMatches = checkIdenticalSteps(fields); + + // Create scenario steps from identical matches + const { givenSteps, whenSteps, thenSteps } = createScenarioSteps(identicalMatches); + + // Create scenario object + const stepInfo = [`\n----- Scenario ${step.index} -----\n`]; + stepInfo.push(fields.Given ? `(GIVEN): ${fields.Given.value}\n` : '(GIVEN): Not used\n'); + stepInfo.push(fields.Action && fields.Action.value.raw ? `(WHEN): ${fields.Action.value.raw}\n` : '(WHEN): Not steps used\n'); + stepInfo.push(fields['Expected Result'] && fields['Expected Result'].value.raw ? `(THEN): ${fields['Expected Result'].value.raw}\n` : '(THEN): No steps used\n'); + testStepDescription += stepInfo.join(''); + + const matchingSteps = []; + // Iterate through all resolved test runs + resolvedTestRuns.forEach((testRunDetails) => { + if (!testRunDetails.steps) { + return; + } + // Map test steps to testrun steps + testRunDetails.steps.forEach((testRunStep) => { + const stepGiven = fields.Given ? fields.Given.value : ''; + const stepAction = fields.Action ? fields.Action.value.raw : ''; + const stepExpected = fields['Expected Result'] ? fields['Expected Result'].value.raw : ''; + const testRunGiven = testRunStep.fields.Given ? extractRaw(testRunStep.fields.Given.value) : ''; + const testRunAction = testRunStep.fields.Action ? testRunStep.fields.Action.value.raw : ''; + const testRunExpected = testRunStep.fields['Expected Result'] ? testRunStep.fields['Expected Result'].value.raw : ''; + + if (stepGiven === testRunGiven && stepAction === testRunAction && stepExpected === testRunExpected) { + matchingSteps.push({ + testRunId: testRunDetails.id, + testRunStepId: testRunStep.id, + testExecKey: testRunDetails.testExecKey + }); + } + }); + }); + + const scenario = { + scenario_id: step.id, + name: `${step.id}`, + stepDefinitions: { + given: givenSteps || [], + when: whenSteps || [], + then: thenSteps || [], + example: [] + }, + testRunSteps: matchingSteps, + testKey: issueKey + }; + + scenarioList.push(scenario); + }); + + return { scenarioList, testStepDescription }; +} + +/** + * Checks if the given xray step is identical to one of the step definitions. + * + * @param {Object} step - The xray step containing sections of given, actiona and expected result. + * @returns {Array} An array of matching step definitions. 
+ */ +function checkIdenticalSteps(step) { + const matches = []; + let context; + // Separate the given, action, and expected result sections and select the relevant text + ['Given', 'Action', 'Expected Result'].forEach(section => { + if (step[section] && step[section].value) { + let texts; + if (section === 'Given') { + texts = step[section].value.split('\n'); + context = 'given'; + } else if (section === 'Action') { + texts = step[section].value.raw.split('\n'); + context = 'when'; + } else if (section === 'Expected Result') { + texts = step[section].value.raw.split('\n'); + context = 'then'; + + } + // Analyze each separated text against patterns + texts.forEach(text => { + if (text.trim()) { + const match = analyzeText(text.trim(), context); + if (match) { + matches.push(match); + } + } + }); + } + }); + return matches; +} + +/** + * Analyzes text to match with step definitions. + * + * @param {string} text - The text to analyze. + * @param {string} context - The context (given, when, then) of the step. + * @returns {Object|null} The matching step definition or null if no match found. + */ +function analyzeText(text, context) { + // Get all step types except 'Add Variable' as it is not relevant for the test steps + const stepTypes = stepDefs().filter(def => def.type !== 'Add Variable'); + + for (let stepType of stepTypes) { + if (stepType.stepType === context) { + // Create a pattern based on the pre, mid, and post values of the step definition + // Store the strings after the pre, mid, and post values by (.*) in the pattern + let pattern = `${escapeRegExp(stepType.pre)}(.*)${stepType.mid ? escapeRegExp(stepType.mid) + '(.*)' : ''}`; + if (stepType.post) { + pattern += `${escapeRegExp(stepType.post)}(.*)`; + } + const regex = new RegExp(pattern, 'i'); + const match = text.match(regex); + if (match) { + const values = match.slice(1).map(value => cleanValue(value.trim().replace(/\.$/, ''))).filter(v => v); + if (stepType.type === "Screenshot" && values.length === 0) { + values.push(''); + } + return { + type: stepType.type, + values: values, + pre: stepType.pre, + mid: stepType.mid ? stepType.mid : '', + post: stepType.post ? stepType.post : undefined, + context: context, + origin: "congruent" + }; + } + } + } + return null; +} + +/** + * Replaces special characters in a string with escape characters. + * + * @param {string} string - The string to escape. + * @returns {string} The escaped string. + */ +function escapeRegExp(string) { + return string.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); +} + +/** + * Cleans value and extracts email or link. + * + * @param {string} value - The value to clean. + * @returns {string} The cleaned value. + */ +function cleanValue(value) { + const linkPattern = /^\[http:\/\/[^\]]+\]$/; + const emailPattern = /^\[([^\]]+@[^\]]+)\|mailto:[^\]]+\]$/; + const quotesPattern = /^"(.*)"$/; + + if (quotesPattern.test(value)) { + value = value.match(quotesPattern)[1]; // remove quotes + } + if (linkPattern.test(value)) { + return value.slice(1, -1); // remove square brackets for links + } else if (emailPattern.test(value)) { + return value.match(emailPattern)[1]; // extract the email + } + return value; +} + +/** + * Creates scenario steps for identical steps. + * + * @param {Array} matchingSteps - An array of matching steps. + * @returns {Object} An object containing scenario steps for given, when, and then. 
+ */
+function createScenarioSteps(matchingSteps) {
+	let givenSteps = [];
+	let whenSteps = [];
+	let thenSteps = [];
+
+	let id = 0;
+
+	for (let scenarioStep of matchingSteps) {
+
+		let newStep = {
+			id: id++,
+			stepType: scenarioStep.context,
+			deactivated: false,
+			origin: scenarioStep.origin,
+			pre: undefined,
+			mid: undefined,
+			post: undefined,
+			values: undefined
+		};
+
+		if (scenarioStep.pre !== undefined) {
+			newStep.pre = scenarioStep.pre;
+		}
+		if (scenarioStep.mid !== undefined) {
+			newStep.mid = scenarioStep.mid;
+		}
+		if (scenarioStep.post !== undefined) {
+			newStep.post = scenarioStep.post;
+		}
+		if (scenarioStep.values !== undefined) {
+			newStep.values = scenarioStep.values;
+		}
+
+		if (scenarioStep.context === 'given') {
+			givenSteps.push(newStep);
+		} else if (scenarioStep.context === 'when') {
+			whenSteps.push(newStep);
+		} else if (scenarioStep.context === 'then') {
+			thenSteps.push(newStep);
+		}
+	}
+
+	return { givenSteps, whenSteps, thenSteps };
+}
+
+/**
+ * Helper function to extract the raw data of "given" section in xray steps
+ *
+ * @param {string} givenField - The given field containing raw data.
+ * @returns {string} The extracted raw data.
+ */
+function extractRaw(givenField) {
+	try {
+		const givenData = JSON.parse(givenField);
+		if (givenData && givenData.raw) {
+			return givenData.raw;
+		}
+	} catch (e) {
+		console.error('Error while parsing Given field of xRay execution step', e);
+	}
+	return '';
+}
+
+module.exports = {
+	handleTestIssue,
+};
diff --git a/backend/src/models/models.ts b/backend/src/models/models.ts
index adbb40505..33b2cbc7e 100644
--- a/backend/src/models/models.ts
+++ b/backend/src/models/models.ts
@@ -17,6 +17,7 @@ class GenericReport {
 	reportOptions: any
 	status: boolean
 	scenariosTested: PassedCount
+	storiesTested
 	reportTime: number
 	mode: ExecutionMode
diff --git a/backend/src/serverHelper.js b/backend/src/serverHelper.js
index 16b95c524..84fd03898 100644
--- a/backend/src/serverHelper.js
+++ b/backend/src/serverHelper.js
@@ -54,12 +54,12 @@ function getSteps(steps, stepType) {
 		if (step.deactivated) continue;
 		data += `${jsUcfirst(stepType)} `;
 		if ((step.values[0]) != null && (step.values[0]) !== 'User') {
-			data += `${step.pre} '${step.values[0]}' ${step.mid}${step.values[1] !== undefined ? `'${step.values[1]}'` : ''}`;
-			if (step.post !== undefined) data += ` ${step.post}${step.values[2] !== undefined ? `'${step.values[2]}'` : ''}`;
+			data += `${step.pre} '${step.values[0]}' ${step.mid ? step.mid : ''}${step.values[1] ? `'${step.values[1]}'` : ''}`;
+			if (step.post) data += ` ${step.post}${step.values[2] ? 
`'${step.values[2]}'` : ''}`; } else if ((step.values[0]) === 'User') data += `${step.pre} '${step.values[0]}'`; else { data += `${step.pre} ${step.mid}${getValues(step.values)}`; - if (step.post !== undefined) data += ` ${step.post}`; + if (step.post) data += ` ${step.post}`; } data += '\n'; } @@ -77,7 +77,7 @@ function getExamples(steps) { for (let k = 0; k < steps[i].values.length; k++) data += `${steps[i].values[k]} | `; } // if no lines other than value line, return empty - if (data.split('\n').length > 2) data = 'Examples:' + data; + if (data.split('\n').length > 2) data = `Examples:${data}`; else return ''; // explicit return as first line (title/name/key) is always written return `${data}\n`; } @@ -105,7 +105,7 @@ function getScenarioContent(scenarios, storyID) { // Building feature file story-name-content (feature file title) function getFeatureContent(story) { let body = ''; - if (Boolean(story.body)) { + if (story.body) { body = story.body.replaceAll('#', '').replaceAll('(/)', ''); } let data = `Feature: ${story.title}\n\n${body}\n\n`; @@ -197,7 +197,7 @@ function getSettings(scenario, globalSettings) { } async function executeTest(req, mode, story) { - const repoId = req.body.repositoryId; + const repoId = req.body.repositoryId || req.body.repoId; let globalSettings; @@ -320,7 +320,6 @@ function scenarioPrep(scenarios, driver, globalSettings) { }); } } - console.log('my Params ', parameters) }); return { scenarios, parameters }; } @@ -417,7 +416,7 @@ async function replaceRefBlocks(scenarios) { if (!elem._blockReferenceId) return [elem]; return mongo.getBlock(elem._blockReferenceId).then((block) => { // Get an array of the values of the given, when, then properties - const steps = [...block.stepDefinitions.given, ...block.stepDefinitions.when, ...block.stepDefinitions.then] + const steps = [...block.stepDefinitions.given, ...block.stepDefinitions.when, ...block.stepDefinitions.then]; // Flatten array return steps.flat(1); }); @@ -664,4 +663,4 @@ module.exports = { exportSingleFeatureFile, exportProjectFeatureFiles, applySpecialCommands -}; \ No newline at end of file +}; diff --git a/backend/src/serverRouter/backgroundRouter.js b/backend/src/serverRouter/backgroundRouter.js index 7e18f9913..3f829a722 100644 --- a/backend/src/serverRouter/backgroundRouter.js +++ b/backend/src/serverRouter/backgroundRouter.js @@ -15,7 +15,7 @@ router extended: true })) .use((_, __, next) => { - console.log('Time of github request:', Date.now()); + console.log('Time of background router request:', Date.now()); next(); }) .use((req, res, next) => { @@ -57,4 +57,4 @@ router.delete('/:storyID', async (req, res) => { } }); -module.exports = router; \ No newline at end of file +module.exports = router; diff --git a/backend/src/serverRouter/blockRouter.js b/backend/src/serverRouter/blockRouter.js index 34974b588..0d075d567 100644 --- a/backend/src/serverRouter/blockRouter.js +++ b/backend/src/serverRouter/blockRouter.js @@ -15,7 +15,7 @@ router extended: true })) .use((_, __, next) => { - console.log('Time of github request:', Date.now()); + console.log('Time of block router request:', Date.now()); next(); }) .use((req, res, next) => { diff --git a/backend/src/serverRouter/githubRouter.js b/backend/src/serverRouter/githubRouter.js index 7fdda1ced..e8fe76aa1 100644 --- a/backend/src/serverRouter/githubRouter.js +++ b/backend/src/serverRouter/githubRouter.js @@ -14,7 +14,7 @@ router extended: true })) .use((_, __, next) => { - console.log('Time of github request:', Date.now()); + console.log('Time of 
github router request:', Date.now()); next(); }) .use((req, res, next) => { diff --git a/backend/src/serverRouter/jiraRouter.js b/backend/src/serverRouter/jiraRouter.js index 18d8e282f..7a29ef4e6 100644 --- a/backend/src/serverRouter/jiraRouter.js +++ b/backend/src/serverRouter/jiraRouter.js @@ -2,6 +2,7 @@ const express = require('express'); const cors = require('cors'); const bodyParser = require('body-parser'); const userHelper = require('../../dist/helpers/userManagement'); +const issueTracker = require('../../dist/models/IssueTracker'); const router = express.Router(); @@ -29,13 +30,14 @@ router.post('/user/create/', (req, res) => { console.error('No Jira User sent. (Got undefinded)'); res.status(401).json('No Jira User sent. (Got undefinded)'); } else { - const { jiraAccountName, jiraPassword, jiraHost: jiraServer, jiraAuthMethod } = req.body; + const { + jiraAccountName, jiraPassword, jiraHost: jiraServer, jiraAuthMethod + } = req.body; let authString = `Bearer ${jiraPassword}`; if (jiraAuthMethod === 'basic') { const auth = Buffer.from(`${jiraAccountName}:${jiraPassword}`).toString('base64'); authString = `Basic ${auth}`; } - console.log('auth ', authString); const options = { method: 'GET', qs: { @@ -50,15 +52,19 @@ router.post('/user/create/', (req, res) => { // jiraHost must only consist of letters, numbers, '.' and ':' to represent URLs, IPs or ports if (/^[.:a-zA-Z0-9]+$/.test(jiraServer)) { - console.log(jiraServer); const jiraURL = `http://${jiraServer}/rest/auth/1/session`; fetch(jiraURL, options) .then((response) => response.json()) .then(() => { - userHelper.updateJiraCredential(req.user._id, jiraAccountName, jiraPassword, jiraServer, jiraAuthMethod) - .then((result) => { - res.status(200).json(result); - }); + userHelper.updateJiraCredential( + req.user._id, + jiraAccountName, + jiraPassword, + jiraServer, + jiraAuthMethod + ).then((result) => { + res.status(200).json(result); + }); }) .catch((error) => { console.error(error); @@ -81,7 +87,9 @@ router.delete('/user/disconnect/', (req, res) => { router.post('/login', (req, res) => { if (typeof req.body.jiraAccountName !== 'undefined') { - const { jiraAccountName, jiraPassword, jiraServer, AuthMethod } = req.body; + const { + jiraAccountName, jiraPassword, jiraServer, AuthMethod + } = req.body; let authString = `Bearer ${jiraPassword}`; if (AuthMethod === 'basic') { const auth = Buffer.from(`${jiraAccountName}:${jiraPassword}`).toString('base64'); @@ -120,4 +128,49 @@ router.post('/login', (req, res) => { } }); +router.put('/update-xray-status', async (req, res) => { + if (typeof req.user !== 'undefined' && typeof req.user.jira !== 'undefined') { + const jiraTracker = issueTracker.IssueTracker + .getIssueTracker(issueTracker.IssueTrackerOption.JIRA); + const clearPass = jiraTracker.decryptPassword(req.user.jira); + const { + AccountName, AuthMethod, Host + } = req.user.jira; + let authString = `Bearer ${clearPass}`; + if (AuthMethod === 'basic') { + const auth = Buffer.from(`${AccountName}:${clearPass}`).toString('base64'); + authString = `Basic ${auth}`; + } + const { testRunId, stepId, status } = req.body; + const url = new URL(`https://${Host}/rest/raven/1.0/api/testrun/${testRunId}/step/${stepId}/status`); + url.searchParams.append('status', status); + + const options = { + method: 'PUT', + headers: { + 'cache-control': 'no-cache', + 'Content-Type': 'application/json', + Authorization: authString + } + }; + try { + const response = await fetch(url, options); + if (!response.ok) throw new Error(`HTTP error! 
status: ${response.status}`); + let data; + const contentType = response.headers.get('content-type'); + if (contentType && contentType.includes('application/json')) { + const text = await response.text(); + if (text) data = JSON.parse(text); + } else data = { message: 'Success', status: response.status }; + res.json(data); + } catch (error) { + console.error('Error while updating Xray status:', error); + res.status(500).json({ message: 'Internal server error while updating Xray status' }); + } + } else { + console.log('No Jira User sent. (Got undefined)'); + res.status(500).json('No Jira User sent. Got (undefined)'); + } +}); + module.exports = router; diff --git a/backend/src/serverRouter/reportRouter.js b/backend/src/serverRouter/reportRouter.js index 313642588..8f115e149 100644 --- a/backend/src/serverRouter/reportRouter.js +++ b/backend/src/serverRouter/reportRouter.js @@ -15,7 +15,7 @@ router extended: true })) .use((_, __, next) => { - console.log('Time of github request:', Date.now()); + console.log('Time of report router request:', Date.now()); next(); }) .use((req, res, next) => { diff --git a/backend/src/serverRouter/runReportRouter.js b/backend/src/serverRouter/runReportRouter.js index 18460393d..956aff921 100644 --- a/backend/src/serverRouter/runReportRouter.js +++ b/backend/src/serverRouter/runReportRouter.js @@ -32,7 +32,7 @@ router // run single Feature router.post('/Feature/:issueID', (req, res) => { - reporter.runReport(req, res, [], 'feature', req.body).catch((reason) => { console.log('failed in runreport', reason); res.send(reason).status(500) }); + reporter.runReport(req, res, [], 'feature', req.body).catch((reason) => { console.log('failed in runreport', reason); res.send(reason).status(500); }); }); // run single Scenario of a Feature @@ -52,9 +52,25 @@ router.post('/Group/:repoID/:groupID', async (req, res) => { params.repository = req.body.repository; params.repoId = req.params.repoID; req.body = group; + console.log('Mystories', mystories); reporter.runReport(req, res, mystories, 'group', req.body).catch((reason) => res.send(reason).status(500)); }); +// run one temporary group and return report +router.post('/TempGroup', async (req, res) => { + const tempGroup = req.body.group; + const mystories = tempGroup.member_stories; + + const params = tempGroup; + params.repository = req.body.repository; + params.repoId = req.body.id; + req.body = tempGroup; + reporter.runReport(req, res, mystories, 'group', tempGroup).then(() => { + console.log('Report for temporary group created'); + }) + .catch((reason) => res.send(reason).status(500)); +}); + // generate older Report router.get('/report/:reportName', (req, res) => { reporter.createReport(res, req.params.reportName); diff --git a/backend/src/serverRouter/sanityTest.js b/backend/src/serverRouter/sanityTest.js index f17289c51..93fbe1a4a 100644 --- a/backend/src/serverRouter/sanityTest.js +++ b/backend/src/serverRouter/sanityTest.js @@ -5,7 +5,7 @@ const bodyParser = require('body-parser'); const passport = require('passport'); const bcrypt = require('bcrypt'); const initializePassport = require('../passport-config'); -const mongo = require('../database/DbServices') +const mongo = require('../database/DbServices'); const reporter = require('../../dist/helpers/reporting'); const router = express.Router(); @@ -39,16 +39,16 @@ router }); router.post('/test/:repoID/:groupID', passport.authenticate('normal-local', { session: false }), async (req, res) => { - try { - req.body.email = req.body.email.toLowerCase(); - await 
test(req, res); - } catch (error) { - res.status(401).json(error); - } + try { + req.body.email = req.body.email.toLowerCase(); + await test(req, res); + } catch (error) { + res.status(401).json(error); + } }); async function test(req, res) { - const group = await mongo.getOneStoryGroup(req.params.repoID, req.params.groupID); + const group = await mongo.getOneStoryGroup(req.params.repoID, req.params.groupID); const mystories = []; for (const ms of group.member_stories) { const id = typeof (ms) === 'object' ? ms._id : ms; // inconsistent in database @@ -60,4 +60,4 @@ async function test(req, res) { reporter.runSanityReport(req, res, mystories, 'group', req.body).catch((reason) => res.send(reason).status(500)); } -module.exports = router; \ No newline at end of file +module.exports = router; diff --git a/backend/src/serverRouter/storyRouter.js b/backend/src/serverRouter/storyRouter.js index bccadafda..d0a246619 100644 --- a/backend/src/serverRouter/storyRouter.js +++ b/backend/src/serverRouter/storyRouter.js @@ -5,6 +5,7 @@ const bodyParser = require('body-parser'); const helper = require('../serverHelper'); const mongo = require('../database/DbServices'); const pmHelper = require('../../dist/helpers/projectManagement'); +const issueTracker = require('../../dist/models/IssueTracker'); const router = express.Router(); const upload = multer({ @@ -52,6 +53,19 @@ router.get('/:_id', async (req, res) => { } }); +// get one Story by issue key +router.get('/issueKey/:issue_key', async (req, res) => { + try { + const story = await mongo.getOneStoryByIssueKey(req.params.issue_key); + if (!story) return res.status(404).send('Story not found'); + + res.status(200).json(story); + } catch (e) { + console.error(e); + handleError(res, e); + } +}); + // create Story router.post('/', async (req, res) => { try { @@ -64,26 +78,10 @@ router.post('/', async (req, res) => { } }); - - -// delete scenario -router.delete('/scenario/:story_id/:_id', async (req, res) => { - try { - await mongo - .deleteScenario(req.params.story_id, parseInt(req.params._id, 10)); - await helper.updateFeatureFile(req.params.story_id); - res.status(200) - .json({ text: 'success' }); - } catch (error) { - handleError(res, error, error, 500); - } -}); - router.get('/download/story/:_id', async (req, res) => { try { console.log('download feature-file', req.params._id); const file = await helper.exportSingleFeatureFile(req.params._id); - console.log(file); res.send(file); } catch (error) { handleError(res, error, error, 500); @@ -95,7 +93,6 @@ router.get('/download/project/:repo_id', async (req, res) => { console.log('download project feature-files', req.params.repo_id); const version = req.query.version_id ? 
req.query.version_id : ''; const file = await helper.exportProjectFeatureFiles(req.params.repo_id, version); - console.log(file); res.send(file); } catch (error) { handleError(res, error, error, 500); @@ -125,15 +122,11 @@ router.post('/oneDriver/:storyID', async (req, res) => { router.post('/uploadFile/:repoId/', multer().single('file'), async (req, res) => { try { console.log('uploadfile'); - const { repoId } = req.params; - console.log(req.file) - - const file = await mongo.fileUpload(req.file.originalname, repoId, req.file.buffer) - if(file) res.status(200).json(file); - else res.status(500) - + const file = await mongo.fileUpload(req.file.originalname, repoId, req.file.buffer); + if (file) res.status(200).json(file); + else res.status(500); } catch (error) { handleError(res, error, error, 500); } @@ -149,7 +142,6 @@ router.get('/uploadFile/:repoId', async (req, res) => { }); router.delete('/uploadFile/:fileId', async (req, res) => { try { - console.log(req.params.fileId) await mongo.deleteFile(req.params.fileId); res.status(200).json({ message: 'File deleted' }); } catch (error) { @@ -250,14 +242,66 @@ router.put('/:story_id/:_id', async (req, res) => { // delete scenario router.delete('/scenario/:story_id/:_id', async (req, res) => { + let dbError = null; + let xrayError = null; + try { - await mongo - .deleteScenario(req.params.story_id, parseInt(req.params._id, 10)); + await mongo.deleteScenario(req.params.story_id, parseInt(req.params._id, 10)); await helper.updateFeatureFile(req.params.story_id); - res.status(200) - .json({ text: 'success' }); } catch (error) { - handleError(res, error, error, 500); + console.error('Database error:', error); + dbError = error; + } + + // if xray enabled, delete xray step in jira + const xrayEnabled = req.headers['x-xray-enabled'] === 'true'; + console.log('XRay enabled:', xrayEnabled); + if (xrayEnabled) { + const testKey = req.headers['x-test-key']; + try { + if (typeof req.user !== 'undefined' && typeof req.user.jira !== 'undefined') { + const jiraTracker = issueTracker.IssueTracker + .getIssueTracker(issueTracker.IssueTrackerOption.JIRA); + const clearPass = jiraTracker.decryptPassword(req.user.jira); + const { + AccountName, AuthMethod, Host + } = req.user.jira; + let authString = `Bearer ${clearPass}`; + if (AuthMethod === 'basic') { + const auth = Buffer.from(`${AccountName}:${clearPass}`).toString('base64'); + authString = `Basic ${auth}`; + } + + const stepId = req.params._id; + const url = `https://${Host}/rest/raven/1.0/api/test/${testKey}/step/${stepId}/`; + + const options = { + method: 'DELETE', + headers: { + 'cache-control': 'no-cache', + 'Content-Type': 'application/json', + Authorization: authString + } + }; + + const response = await fetch(url, options); + if (!response.ok) throw new Error(`HTTP error! status: ${response.status}`); + } else { + console.log('No Jira user provided. (Got undefined)'); + xrayError = new Error('No Jira user provided.'); + } + } catch (error) { + console.error('Error while deleting XRay step:', error); + xrayError = error; + } + } + + if (!dbError && !xrayError) res.status(200).json({ text: 'Scenario successfully deleted.' }); + else { + let errorMessage = 'Error during deletion: '; + if (dbError) errorMessage += 'Database error. 
'; + if (xrayError) errorMessage += 'Error deleting XRay step.'; + res.status(500).json({ message: errorMessage }); } }); @@ -265,7 +309,6 @@ router.get('/download/story/:_id', async (req, res) => { try { console.log('download feature-file', req.params._id); const file = await helper.exportSingleFeatureFile(req.params._id); - console.log(file); res.send(file); } catch (error) { handleError(res, error, error, 500); @@ -277,7 +320,6 @@ router.get('/download/project/:repo_id', async (req, res) => { console.log('download project feature-files', req.params.repo_id); const version = req.query.version_id ? req.query.version_id : ''; const file = await helper.exportProjectFeatureFiles(req.params.repo_id, version); - console.log(file); res.send(file); } catch (error) { handleError(res, error, error, 500); diff --git a/backend/src/serverRouter/userRouter.js b/backend/src/serverRouter/userRouter.js index d1dcf724f..7170436b8 100644 --- a/backend/src/serverRouter/userRouter.js +++ b/backend/src/serverRouter/userRouter.js @@ -1,3 +1,4 @@ +/* eslint-disable curly */ /* eslint-disable no-underscore-dangle */ const express = require('express'); const cors = require('cors'); @@ -6,13 +7,15 @@ const passport = require('passport'); const bcrypt = require('bcrypt'); const { v1: uuidv1 } = require('uuid'); const fs = require('fs'); +const crypto = require('crypto'); const initializePassport = require('../passport-config'); const mongo = require('../database/DbServices'); const nodeMail = require('../nodemailer'); const userMng = require('../../dist/helpers/userManagement'); const projectMng = require('../../dist/helpers/projectManagement'); const issueTracker = require('../../dist/models/IssueTracker'); -const crypto = require('crypto'); +const stepDefs = require('../database/stepTypes'); +const xray = require('../../dist/helpers/xray'); const router = express.Router(); const salt = bcrypt.genSaltSync(10); @@ -104,14 +107,14 @@ router.post('/login', (req, res, next) => { req.logIn(user, async (err) => { if (err) throw err; else { - if(user.transitioned === false) { + if (user.transitioned === false) { const hasher = crypto.createHash('sha256'); - hasher.update(req.body.password) - const passHash = hasher.digest() - const finalHash = bcrypt.hashSync(passHash.toString('hex'), salt) - user.password = finalHash - user.transitioned = true - mongo.updateUser(user._id, user) + hasher.update(req.body.password); + const passHash = hasher.digest(); + const finalHash = bcrypt.hashSync(passHash.toString('hex'), salt); + user.password = finalHash; + user.transitioned = true; + mongo.updateUser(user._id, user); } res.json(user); } @@ -332,8 +335,8 @@ router.get('/stories', async (req, res) => { // put into ticketManagement.ts const orderedStories = matchOrder(array, tmpStories, repo); res.status(200).json(orderedStories); }) - .catch((e) => { - console.log(e); + .catch((error) => { + console.error(error); }); } } catch (err) { @@ -342,9 +345,8 @@ router.get('/stories', async (req, res) => { // put into ticketManagement.ts // get Jira Repo / Projects } else if (source === 'jira' && typeof req.user !== 'undefined' && typeof req.user.jira !== 'undefined' && req.query.projectKey !== 'null') { // prepare request - const { projectKey } = req.query; - const jiraTracker = issueTracker.IssueTracker - .getIssueTracker(issueTracker.IssueTrackerOption.JIRA); + const { projectKey, id } = req.query; + const jiraTracker = issueTracker.IssueTracker.getIssueTracker(issueTracker.IssueTrackerOption.JIRA); const clearPass = 
jiraTracker.decryptPassword(req.user.jira); const { AccountName, AuthMethod, Host } = req.user.jira; let authString = `Bearer ${clearPass}`; @@ -359,7 +361,6 @@ router.get('/stories', async (req, res) => { // put into ticketManagement.ts // need https so request is not redirected // when the request is redirected, the Authorization Header is removed // https://developer.mozilla.org/en-US/docs/Web/API/fetch#headers - const url = `https://${Host}/rest/api/2/search?jql=project=${projectKey}+AND+labels=Seed-Test&startAt=0&maxResults=200`; const options = { method: 'GET', headers: { @@ -368,48 +369,122 @@ router.get('/stories', async (req, res) => { // put into ticketManagement.ts } }; let repo; - let jiraIssues; + const testSets = []; + const preConditionMap = []; try { - jiraIssues = await fetch(url, options) - .then((response) => response.json()); - } catch (e) { console.error(' #### Error during Jira API call: \n', e); } - - try { - repo = await mongo.getOneJiraRepository(req.query.projectKey); - for (const issue of jiraIssues.issues) if (issue.fields.labels.includes('Seed-Test')) { - const story = { - story_id: issue.id, - title: issue.fields.summary, - body: issue.fields.description, - state: issue.fields.status.name, - issue_number: issue.key, - storySource: 'jira' - }; - if (issue.fields.assignee !== null) { - // skip in case of "unassigned" - story.assignee = issue.fields.assignee.name; - story.assignee_avatar_url = issue.fields.assignee.avatarUrls['32x32']; - } else { - story.assignee = 'unassigned'; - story.assignee_avatar_url = null; - } - const entry = await projectMng.fuseStoryWithDb(story, issue.id); - tmpStories.set(entry._id.toString(), entry); - storiesArray.push(entry._id); - } + await fetch( + `https://${Host}/rest/api/2/search?jql=project="${projectKey}"+AND+(labels=Seed-Test+OR+issuetype=Test+OR+issuetype="Test Set"+OR+issuetype="Pre-Condition")&startAt=0&maxResults=200`, + options + ) + .then(async (response) => response.json()) + .then(async (json) => { + try { + repo = await mongo.getOneJiraRepository(req.query.projectKey); + + const asyncHandleTestIssue = json.issues.map(async (issue) => { + // If the issue is a "Test Set" issue + if (issue.fields.issuetype.name === 'Test Set') { + const testsInSet = issue.fields.customfield_14233 || []; + testSets.push({ + testSetKey: issue.key, + testSetId: issue.id, + tests: testsInSet, + xrayTestSet: true + }); + // Return null to indicate that this path does not continue further processing + return null; + } + + // If the issue is a "Pre-Condition" issue + if (issue.fields.issuetype.name === 'Pre-Condition') { + const preCondition = { + preConditionKey: issue.key, + preConditionName: issue.fields.summary, + testSet: [] + }; + // Iterate through the issue links to find the test sets that are linked to the pre-condition + for (const link of issue.fields.issuelinks) { + if (link.inwardIssue && link.type.inward === 'tested by') { + preCondition.testSet.push(link.inwardIssue.key); + } + } + preConditionMap.push(preCondition); + // Similarly, return null for this path + return null; + } + + // If the issue is a "Test" issue + if (issue.fields.issuetype.name === 'Test') { + return xray.handleTestIssue(issue, options, Host); + } + + return { scenarioList: [], testStepDescription: '' }; + }); + + const lstDesc = await Promise.all(asyncHandleTestIssue); + + const stories = []; + + for (const [index, issue] of json.issues.entries()) { + if (!lstDesc[index]) continue; + + let preConditions = []; + + // Check if the custom field for 
preconditions exists in issue.fields + if (issue.fields.customfield_14229) { + preConditions = issue.fields.customfield_14229; + } + + // Compare the preconditions with preConditionMap to get the final preconditions + const finalPreConditions = preConditionMap.filter((preCondition) => preConditions.includes(preCondition.preConditionKey)); + + const { scenarioList, testStepDescription } = lstDesc[index]; + + const issueDescription = issue.fields.description ? issue.fields.description : ''; + + const story = { + story_id: issue.id, + title: issue.fields.summary, + body: issueDescription + testStepDescription, + scenarios: scenarioList, + state: issue.fields.status.name, + issue_number: issue.key, + storySource: 'jira', + host: Host, + preConditions: finalPreConditions + }; + + if (issue.fields.assignee !== null) { + story.assignee = issue.fields.assignee.name; + story.assignee_avatar_url = issue.fields.assignee.avatarUrls['32x32']; + } else { + story.assignee = 'unassigned'; + story.assignee_avatar_url = null; + } + stories.push(story); + } + const fusing = stories.map((story) => projectMng.fuseStoryWithDb(story, story.story_id)); + await Promise.all(fusing).then((entries) => entries.forEach((entry) => { + tmpStories.set(entry._id.toString(), entry); + storiesArray.push(entry._id); + })); + } catch (e) { + console.error('Error while getting Jira issues:', e); + } + projectMng.updateTestSets(testSets, id); + Promise.all(storiesArray) + .then((array) => { + const orderedStories = matchOrder(array, tmpStories, repo); + res.status(200) + .json(orderedStories); + }) + .catch((e) => { + console.error(e); + }); + }); } catch (e) { console.error(' #### Error while parsing Jira issues:\n', e); } - Promise.all(storiesArray) - .then((array) => { - const orderedStories = matchOrder(array, tmpStories, repo); - res.status(200) - .json(orderedStories); - }) - .catch((e) => { - console.error(e); - }); - // get DB Repo / Projects } else if (source === 'db' && typeof req.user !== 'undefined' && req.query.repoName !== 'null') { const result = await mongo.getAllStoriesOfRepo(req.query.id); diff --git a/backend/src/serverRouter/workgroups.js b/backend/src/serverRouter/workgroups.js index 3e98f1357..db5c14dc5 100644 --- a/backend/src/serverRouter/workgroups.js +++ b/backend/src/serverRouter/workgroups.js @@ -27,7 +27,6 @@ router next(); }) .use((_, __, next) => { - // console.log(_.url + JSON.stringify(_.user)); console.log('Time of workgroups request:', Date.now()); next(); }); diff --git a/frontend/package-lock.json b/frontend/package-lock.json index cfb947990..ccd7a5749 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -1,12 +1,12 @@ { "name": "seed-test-frontend", - "version": "1.7.3", + "version": "1.8.0", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "seed-test-frontend", - "version": "1.7.3", + "version": "1.8.0", "dependencies": { "@angular-devkit/build-angular": "^18.2.12", "@angular/animations": "^18.2.13", @@ -26,12 +26,12 @@ "@ngneat/until-destroy": "^10.0.0", "canvg": "^4.0.2", "core-js": "^3.39.0", - "dotenv": "^16.4.5", + "dotenv": "^16.4.7", "express": "^4.21.1", "file-saver": "^2.0.5", "hammerjs": "^2.0.8", "html2canvas": "1.4.1", - "jest-preset-angular": "^14.4.1", + "jest-preset-angular": "^14.4.2", "jquery": "^3.7.1", "jspdf": "^2.5.2", "ngx-cookie-service": "^18.0.0", @@ -43,17 +43,17 @@ "zone.js": "^0.14.10" }, "devDependencies": { - "@angular-eslint/builder": "18.4.2", - "@angular-eslint/eslint-plugin": "18.4.2", - 
"@angular-eslint/eslint-plugin-template": "18.4.2", - "@angular-eslint/schematics": "18.4.2", - "@angular-eslint/template-parser": "18.4.2", + "@angular-eslint/builder": "18.4.3", + "@angular-eslint/eslint-plugin": "18.4.3", + "@angular-eslint/eslint-plugin-template": "18.4.3", + "@angular-eslint/schematics": "18.4.3", + "@angular-eslint/template-parser": "18.4.3", "@angular/compiler-cli": "^18.2.13", "@compodoc/compodoc": "^1.1.26", "@popperjs/core": "^2.11.8", "@types/jest": "^29.5.14", - "@typescript-eslint/eslint-plugin": "^8.16.0", - "@typescript-eslint/parser": "^8.16.0", + "@typescript-eslint/eslint-plugin": "^8.17.0", + "@typescript-eslint/parser": "^8.17.0", "eslint": "^8.57.1", "jest": "^29.7.0", "jest-canvas-mock": "^2.5.2", @@ -898,32 +898,36 @@ } }, "node_modules/@angular-eslint/builder": { - "version": "18.4.2", - "resolved": "https://registry.npmjs.org/@angular-eslint/builder/-/builder-18.4.2.tgz", - "integrity": "sha512-eyI9sreaM9ukA24PCJoSqsjCYOiBf3TZ/Q1WY8PG0SwQWc03qJNqPl5K+/Ptmsc1RtoDCLCU6uaOBFPhb9lDxw==", + "version": "18.4.3", + "resolved": "https://registry.npmjs.org/@angular-eslint/builder/-/builder-18.4.3.tgz", + "integrity": "sha512-NzmrXlr7GFE+cjwipY/CxBscZXNqnuK0us1mO6Z2T6MeH6m+rRcdlY/rZyKoRniyNNvuzl6vpEsfMIMmnfebrA==", "dev": true, "license": "MIT", + "dependencies": { + "@angular-devkit/architect": ">= 0.1800.0 < 0.1900.0", + "@angular-devkit/core": ">= 18.0.0 < 19.0.0" + }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": "*" } }, "node_modules/@angular-eslint/bundled-angular-compiler": { - "version": "18.4.2", - "resolved": "https://registry.npmjs.org/@angular-eslint/bundled-angular-compiler/-/bundled-angular-compiler-18.4.2.tgz", - "integrity": "sha512-K7pqmZI3Dl75zlLexyaM7bw4xdgk/3bhP1B6uqDKML9+vIIvccCR2bGvqFurqeFbJlMykzb3H4jytT+HpqV4tg==", + "version": "18.4.3", + "resolved": "https://registry.npmjs.org/@angular-eslint/bundled-angular-compiler/-/bundled-angular-compiler-18.4.3.tgz", + "integrity": "sha512-zdrA8mR98X+U4YgHzUKmivRU+PxzwOL/j8G7eTOvBuq8GPzsP+hvak+tyxlgeGm9HsvpFj9ERHLtJ0xDUPs8fg==", "dev": true, "license": "MIT" }, "node_modules/@angular-eslint/eslint-plugin": { - "version": "18.4.2", - "resolved": "https://registry.npmjs.org/@angular-eslint/eslint-plugin/-/eslint-plugin-18.4.2.tgz", - "integrity": "sha512-Oem4W2P54cPADN9rJenLj90rqDPUQWx5kZiz84FCnsSn5DBdsI5LGQoogNT9y3Jx/9VL/VGIMMA5B6qG+0hVlg==", + "version": "18.4.3", + "resolved": "https://registry.npmjs.org/@angular-eslint/eslint-plugin/-/eslint-plugin-18.4.3.tgz", + "integrity": "sha512-AyJbupiwTBR81P6T59v+aULEnPpZBCBxL2S5QFWfAhNCwWhcof4GihvdK2Z87yhvzDGeAzUFSWl/beJfeFa+PA==", "dev": true, "license": "MIT", "dependencies": { - "@angular-eslint/bundled-angular-compiler": "18.4.2", - "@angular-eslint/utils": "18.4.2" + "@angular-eslint/bundled-angular-compiler": "18.4.3", + "@angular-eslint/utils": "18.4.3" }, "peerDependencies": { "@typescript-eslint/utils": "^7.11.0 || ^8.0.0", @@ -932,14 +936,14 @@ } }, "node_modules/@angular-eslint/eslint-plugin-template": { - "version": "18.4.2", - "resolved": "https://registry.npmjs.org/@angular-eslint/eslint-plugin-template/-/eslint-plugin-template-18.4.2.tgz", - "integrity": "sha512-v9msmIdZK6lOEC4ScDeYKFLpszpJ5Ei+8ifkT7fXXKmPaWtPJtMbW+VGOUNm5Ezi+xByAGCn1qU+OF2aJ/4CLw==", + "version": "18.4.3", + "resolved": "https://registry.npmjs.org/@angular-eslint/eslint-plugin-template/-/eslint-plugin-template-18.4.3.tgz", + "integrity": "sha512-ijGlX2N01ayMXTpeQivOA31AszO8OEbu9ZQUCxnu9AyMMhxyi2q50bujRChAvN9YXQfdQtbxuajxV6+aiWb5BQ==", "dev": true, 
"license": "MIT", "dependencies": { - "@angular-eslint/bundled-angular-compiler": "18.4.2", - "@angular-eslint/utils": "18.4.2", + "@angular-eslint/bundled-angular-compiler": "18.4.3", + "@angular-eslint/utils": "18.4.3", "aria-query": "5.3.2", "axobject-query": "4.1.0" }, @@ -951,21 +955,19 @@ } }, "node_modules/@angular-eslint/schematics": { - "version": "18.4.2", - "resolved": "https://registry.npmjs.org/@angular-eslint/schematics/-/schematics-18.4.2.tgz", - "integrity": "sha512-pZCc3NhfwRT5S0DGXTzKbl3dD4I8K4LRYot+Aq4rzY5LtiGHDSi4PKu2M0OBSRrQFQXq7/2gDXGO0AvH6LX97w==", + "version": "18.4.3", + "resolved": "https://registry.npmjs.org/@angular-eslint/schematics/-/schematics-18.4.3.tgz", + "integrity": "sha512-D5maKn5e6n58+8n7jLFLD4g+RGPOPeDSsvPc1sqial5tEKLxAJQJS9WZ28oef3bhkob6C60D+1H0mMmEEVvyVA==", "dev": true, "license": "MIT", "dependencies": { - "@angular-eslint/eslint-plugin": "18.4.2", - "@angular-eslint/eslint-plugin-template": "18.4.2", + "@angular-devkit/core": ">= 18.0.0 < 19.0.0", + "@angular-devkit/schematics": ">= 18.0.0 < 19.0.0", + "@angular-eslint/eslint-plugin": "18.4.3", + "@angular-eslint/eslint-plugin-template": "18.4.3", "ignore": "6.0.2", "semver": "7.6.3", "strip-json-comments": "3.1.1" - }, - "peerDependencies": { - "@angular-devkit/core": ">= 18.0.0 < 19.0.0", - "@angular-devkit/schematics": ">= 18.0.0 < 19.0.0" } }, "node_modules/@angular-eslint/schematics/node_modules/ignore": { @@ -979,13 +981,13 @@ } }, "node_modules/@angular-eslint/template-parser": { - "version": "18.4.2", - "resolved": "https://registry.npmjs.org/@angular-eslint/template-parser/-/template-parser-18.4.2.tgz", - "integrity": "sha512-KGjDLUxMsdjaxC+8VTxCG07Q6qshOTWMYTvp2LZ4QBySDQnQuFwsIJIJfU8jJwzJCkPKfVpnyuHggAn7fdYnxA==", + "version": "18.4.3", + "resolved": "https://registry.npmjs.org/@angular-eslint/template-parser/-/template-parser-18.4.3.tgz", + "integrity": "sha512-JZMPtEB8yNip3kg4WDEWQyObSo2Hwf+opq2ElYuwe85GQkGhfJSJ2CQYo4FSwd+c5MUQAqESNRg9QqGYauDsiw==", "dev": true, "license": "MIT", "dependencies": { - "@angular-eslint/bundled-angular-compiler": "18.4.2", + "@angular-eslint/bundled-angular-compiler": "18.4.3", "eslint-scope": "^8.0.2" }, "peerDependencies": { @@ -1010,13 +1012,13 @@ } }, "node_modules/@angular-eslint/utils": { - "version": "18.4.2", - "resolved": "https://registry.npmjs.org/@angular-eslint/utils/-/utils-18.4.2.tgz", - "integrity": "sha512-+c0r33QSkAnGmu/DYAPfzJJk5QDX4TP2d6EFtsenrufqRkZqrOcK4Q5t61J92Ukkr03XoqTzTDSBjlwAfM56Rw==", + "version": "18.4.3", + "resolved": "https://registry.npmjs.org/@angular-eslint/utils/-/utils-18.4.3.tgz", + "integrity": "sha512-w0bJ9+ELAEiPBSTPPm9bvDngfu1d8JbzUhvs2vU+z7sIz/HMwUZT5S4naypj2kNN0gZYGYrW0lt+HIbW87zTAQ==", "dev": true, "license": "MIT", "dependencies": { - "@angular-eslint/bundled-angular-compiler": "18.4.2" + "@angular-eslint/bundled-angular-compiler": "18.4.3" }, "peerDependencies": { "@typescript-eslint/utils": "^7.11.0 || ^8.0.0", @@ -7339,17 +7341,17 @@ "integrity": "sha512-I4q9QU9MQv4oEOz4tAHJtNz1cwuLxn2F3xcc2iV5WdqLPpUnj30aUuxt1mAxYTG+oe8CZMV/+6rU4S4gRDzqtQ==" }, "node_modules/@typescript-eslint/eslint-plugin": { - "version": "8.16.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.16.0.tgz", - "integrity": "sha512-5YTHKV8MYlyMI6BaEG7crQ9BhSc8RxzshOReKwZwRWN0+XvvTOm+L/UYLCYxFpfwYuAAqhxiq4yae0CMFwbL7Q==", + "version": "8.17.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.17.0.tgz", + "integrity": 
"sha512-HU1KAdW3Tt8zQkdvNoIijfWDMvdSweFYm4hWh+KwhPstv+sCmWb89hCIP8msFm9N1R/ooh9honpSuvqKWlYy3w==", "dev": true, "license": "MIT", "dependencies": { "@eslint-community/regexpp": "^4.10.0", - "@typescript-eslint/scope-manager": "8.16.0", - "@typescript-eslint/type-utils": "8.16.0", - "@typescript-eslint/utils": "8.16.0", - "@typescript-eslint/visitor-keys": "8.16.0", + "@typescript-eslint/scope-manager": "8.17.0", + "@typescript-eslint/type-utils": "8.17.0", + "@typescript-eslint/utils": "8.17.0", + "@typescript-eslint/visitor-keys": "8.17.0", "graphemer": "^1.4.0", "ignore": "^5.3.1", "natural-compare": "^1.4.0", @@ -7373,16 +7375,16 @@ } }, "node_modules/@typescript-eslint/parser": { - "version": "8.16.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.16.0.tgz", - "integrity": "sha512-D7DbgGFtsqIPIFMPJwCad9Gfi/hC0PWErRRHFnaCWoEDYi5tQUDiJCTmGUbBiLzjqAck4KcXt9Ayj0CNlIrF+w==", + "version": "8.17.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.17.0.tgz", + "integrity": "sha512-Drp39TXuUlD49F7ilHHCG7TTg8IkA+hxCuULdmzWYICxGXvDXmDmWEjJYZQYgf6l/TFfYNE167m7isnc3xlIEg==", "dev": true, "license": "BSD-2-Clause", "dependencies": { - "@typescript-eslint/scope-manager": "8.16.0", - "@typescript-eslint/types": "8.16.0", - "@typescript-eslint/typescript-estree": "8.16.0", - "@typescript-eslint/visitor-keys": "8.16.0", + "@typescript-eslint/scope-manager": "8.17.0", + "@typescript-eslint/types": "8.17.0", + "@typescript-eslint/typescript-estree": "8.17.0", + "@typescript-eslint/visitor-keys": "8.17.0", "debug": "^4.3.4" }, "engines": { @@ -7402,14 +7404,14 @@ } }, "node_modules/@typescript-eslint/scope-manager": { - "version": "8.16.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.16.0.tgz", - "integrity": "sha512-mwsZWubQvBki2t5565uxF0EYvG+FwdFb8bMtDuGQLdCCnGPrDEDvm1gtfynuKlnpzeBRqdFCkMf9jg1fnAK8sg==", + "version": "8.17.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.17.0.tgz", + "integrity": "sha512-/ewp4XjvnxaREtqsZjF4Mfn078RD/9GmiEAtTeLQ7yFdKnqwTOgRMSvFz4et9U5RiJQ15WTGXPLj89zGusvxBg==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/types": "8.16.0", - "@typescript-eslint/visitor-keys": "8.16.0" + "@typescript-eslint/types": "8.17.0", + "@typescript-eslint/visitor-keys": "8.17.0" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -7420,14 +7422,14 @@ } }, "node_modules/@typescript-eslint/type-utils": { - "version": "8.16.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.16.0.tgz", - "integrity": "sha512-IqZHGG+g1XCWX9NyqnI/0CX5LL8/18awQqmkZSl2ynn8F76j579dByc0jhfVSnSnhf7zv76mKBQv9HQFKvDCgg==", + "version": "8.17.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.17.0.tgz", + "integrity": "sha512-q38llWJYPd63rRnJ6wY/ZQqIzPrBCkPdpIsaCfkR3Q4t3p6sb422zougfad4TFW9+ElIFLVDzWGiGAfbb/v2qw==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/typescript-estree": "8.16.0", - "@typescript-eslint/utils": "8.16.0", + "@typescript-eslint/typescript-estree": "8.17.0", + "@typescript-eslint/utils": "8.17.0", "debug": "^4.3.4", "ts-api-utils": "^1.3.0" }, @@ -7448,9 +7450,9 @@ } }, "node_modules/@typescript-eslint/types": { - "version": "8.16.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.16.0.tgz", - "integrity": 
"sha512-NzrHj6thBAOSE4d9bsuRNMvk+BvaQvmY4dDglgkgGC0EW/tB3Kelnp3tAKH87GEwzoxgeQn9fNGRyFJM/xd+GQ==", + "version": "8.17.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.17.0.tgz", + "integrity": "sha512-gY2TVzeve3z6crqh2Ic7Cr+CAv6pfb0Egee7J5UAVWCpVvDI/F71wNfolIim4FE6hT15EbpZFVUj9j5i38jYXA==", "dev": true, "license": "MIT", "engines": { @@ -7462,14 +7464,14 @@ } }, "node_modules/@typescript-eslint/typescript-estree": { - "version": "8.16.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.16.0.tgz", - "integrity": "sha512-E2+9IzzXMc1iaBy9zmo+UYvluE3TW7bCGWSF41hVWUE01o8nzr1rvOQYSxelxr6StUvRcTMe633eY8mXASMaNw==", + "version": "8.17.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.17.0.tgz", + "integrity": "sha512-JqkOopc1nRKZpX+opvKqnM3XUlM7LpFMD0lYxTqOTKQfCWAmxw45e3qlOCsEqEB2yuacujivudOFpCnqkBDNMw==", "dev": true, "license": "BSD-2-Clause", "dependencies": { - "@typescript-eslint/types": "8.16.0", - "@typescript-eslint/visitor-keys": "8.16.0", + "@typescript-eslint/types": "8.17.0", + "@typescript-eslint/visitor-keys": "8.17.0", "debug": "^4.3.4", "fast-glob": "^3.3.2", "is-glob": "^4.0.3", @@ -7491,16 +7493,16 @@ } }, "node_modules/@typescript-eslint/utils": { - "version": "8.16.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.16.0.tgz", - "integrity": "sha512-C1zRy/mOL8Pj157GiX4kaw7iyRLKfJXBR3L82hk5kS/GyHcOFmy4YUq/zfZti72I9wnuQtA/+xzft4wCC8PJdA==", + "version": "8.17.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.17.0.tgz", + "integrity": "sha512-bQC8BnEkxqG8HBGKwG9wXlZqg37RKSMY7v/X8VEWD8JG2JuTHuNK0VFvMPMUKQcbk6B+tf05k+4AShAEtCtJ/w==", "dev": true, "license": "MIT", "dependencies": { "@eslint-community/eslint-utils": "^4.4.0", - "@typescript-eslint/scope-manager": "8.16.0", - "@typescript-eslint/types": "8.16.0", - "@typescript-eslint/typescript-estree": "8.16.0" + "@typescript-eslint/scope-manager": "8.17.0", + "@typescript-eslint/types": "8.17.0", + "@typescript-eslint/typescript-estree": "8.17.0" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -7519,13 +7521,13 @@ } }, "node_modules/@typescript-eslint/visitor-keys": { - "version": "8.16.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.16.0.tgz", - "integrity": "sha512-pq19gbaMOmFE3CbL0ZB8J8BFCo2ckfHBfaIsaOZgBIF4EoISJIdLX5xRhd0FGB0LlHReNRuzoJoMGpTjq8F2CQ==", + "version": "8.17.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.17.0.tgz", + "integrity": "sha512-1Hm7THLpO6ww5QU6H/Qp+AusUUl+z/CAm3cNZZ0jQvon9yicgO7Rwd+/WWRpMKLYV6p2UvdbR27c86rzCPpreg==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/types": "8.16.0", + "@typescript-eslint/types": "8.17.0", "eslint-visitor-keys": "^4.2.0" }, "engines": { @@ -9893,9 +9895,10 @@ "dev": true }, "node_modules/dotenv": { - "version": "16.4.5", - "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.4.5.tgz", - "integrity": "sha512-ZmdL2rui+eB2YwhsWzjInR8LldtZHGDoQ1ugH85ppHKwpUHL7j7rN0Ti9NCnGiQbhaZ11FpR+7ao1dNsmduNUg==", + "version": "16.4.7", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.4.7.tgz", + "integrity": "sha512-47qPchRCykZC03FhkYAhrvwU4xDBFIj1QPqaarj6mdM/hgUzfPHcpkHJOn3mJAufFeeAxAzeGsr5X0M4k6fLZQ==", + "license": "BSD-2-Clause", "engines": { "node": ">=12" }, @@ -13120,9 +13123,9 @@ } }, "node_modules/jest-preset-angular": { - "version": 
"14.4.1", - "resolved": "https://registry.npmjs.org/jest-preset-angular/-/jest-preset-angular-14.4.1.tgz", - "integrity": "sha512-6QBP9SN+VVilghc5hjWzJ4ZBrBB4Djl2fO5uyjJhIWEq/r9255fAyDNHfoigdUbx3l4MRVwwyiTMXRsFAZE4XQ==", + "version": "14.4.2", + "resolved": "https://registry.npmjs.org/jest-preset-angular/-/jest-preset-angular-14.4.2.tgz", + "integrity": "sha512-BYYv0FaTDfBNh8WyA9mpOV3krfw20kurBGK8INZUnv7KZDAWZuQtCET4TwTWxSNQ9jS1OX1+a5weCm/bTDDM1A==", "license": "MIT", "dependencies": { "bs-logger": "^0.2.6", diff --git a/frontend/package.json b/frontend/package.json index 9eae44357..48e4b28b0 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -14,7 +14,7 @@ "test:debug": "node --inspect node_modules/.bin/jest --runInBand", "lint": "ng lint", "prod": "ng serve -c production", - "dev": "ng serve", + "dev": "ng serve & node server.js", "compodoc": "./node_modules/.bin/compodoc -p tsconfig.json -w -s" }, "private": true, @@ -37,12 +37,12 @@ "@ngneat/until-destroy": "^10.0.0", "canvg": "^4.0.2", "core-js": "^3.39.0", - "dotenv": "^16.4.5", + "dotenv": "^16.4.7", "express": "^4.21.1", "file-saver": "^2.0.5", "hammerjs": "^2.0.8", "html2canvas": "1.4.1", - "jest-preset-angular": "^14.4.1", + "jest-preset-angular": "^14.4.2", "jquery": "^3.7.1", "jspdf": "^2.5.2", "ngx-cookie-service": "^18.0.0", @@ -54,17 +54,17 @@ "zone.js": "^0.14.10" }, "devDependencies": { - "@angular-eslint/builder": "18.4.2", - "@angular-eslint/eslint-plugin": "18.4.2", - "@angular-eslint/eslint-plugin-template": "18.4.2", - "@angular-eslint/schematics": "18.4.2", - "@angular-eslint/template-parser": "18.4.2", + "@angular-eslint/builder": "18.4.3", + "@angular-eslint/eslint-plugin": "18.4.3", + "@angular-eslint/eslint-plugin-template": "18.4.3", + "@angular-eslint/schematics": "18.4.3", + "@angular-eslint/template-parser": "18.4.3", "@angular/compiler-cli": "^18.2.13", "@compodoc/compodoc": "^1.1.26", "@popperjs/core": "^2.11.8", "@types/jest": "^29.5.14", - "@typescript-eslint/eslint-plugin": "^8.16.0", - "@typescript-eslint/parser": "^8.16.0", + "@typescript-eslint/eslint-plugin": "^8.17.0", + "@typescript-eslint/parser": "^8.17.0", "eslint": "^8.57.1", "jest": "^29.7.0", "jest-canvas-mock": "^2.5.2", diff --git a/frontend/src/app/Services/group.service.ts b/frontend/src/app/Services/group.service.ts index ff14c2f60..dc9fa68a3 100644 --- a/frontend/src/app/Services/group.service.ts +++ b/frontend/src/app/Services/group.service.ts @@ -135,4 +135,13 @@ export class GroupService { return this.http .post(this.apiService.apiServer + '/run/Group/' + repoID + '/' + groupID, params, { withCredentials: true, headers: new HttpHeaders({ timeout: `${timeout}` }) }); } + + /* + * Running a temporary group with precondition storys + */ + runTempGroup(params): Observable { + const timeout = 6000000; + return this.http + .post(this.apiService.apiServer + '/run/TempGroup', params, { withCredentials: true, headers: new HttpHeaders({ timeout: `${timeout}` }) }); + } } diff --git a/frontend/src/app/Services/http-logger.interceptor.ts b/frontend/src/app/Services/http-logger.interceptor.ts index c12a2a858..8093ebe72 100644 --- a/frontend/src/app/Services/http-logger.interceptor.ts +++ b/frontend/src/app/Services/http-logger.interceptor.ts @@ -20,7 +20,7 @@ export class HttpLoggerInterceptor implements HttpInterceptor { reqMethod: req.method, reqURL: req.urlWithParams, }; - this.logger.log("sended request", detail, req.headers); + this.logger.log("sent request", detail, req.headers); return next.handle(req).pipe( tap((event) => { 
diff --git a/frontend/src/app/Services/scenario.service.ts b/frontend/src/app/Services/scenario.service.ts
index 90f98e714..7ffd77ce8 100644
--- a/frontend/src/app/Services/scenario.service.ts
+++ b/frontend/src/app/Services/scenario.service.ts
@@ -5,6 +5,8 @@ import { ApiService } from '../Services/api.service';
 import { HttpClient } from '@angular/common/http';
 import { tap } from 'rxjs/operators';
 import { Story } from '../model/Story';
+import { HttpHeaders } from '@angular/common/http';
+
 
 /**
  * Service for communication between scenario component and the backend
@@ -36,8 +38,9 @@ export class ScenarioService {
   /**
    * Emits the delete scenario event
    */
-  public deleteScenarioEmitter() {
-    this.deleteScenarioEvent.emit();
+  public deleteScenarioEmitter(xrayEnabled: boolean) {
+    console.log('Xray enabled: ' + xrayEnabled);
+    this.deleteScenarioEvent.emit(xrayEnabled);
   }
   /* Emits scenario changed event */
   public scenarioChangedEmitter() {
@@ -63,7 +66,7 @@ export class ScenarioService {
   /* Updating scenario list */
   public updateScenarioList(story_id, scenario_list: Scenario[]): Observable {
     return this.http
-      .patch(this.apiService.apiServer + '/story/' + story_id , scenario_list, ApiService.getOptions())
+      .patch(this.apiService.apiServer + '/story/' + story_id, scenario_list, ApiService.getOptions())
       .pipe(tap(_ => {
         //
       }));
@@ -82,14 +85,14 @@ export class ScenarioService {
       console.log('Add new scenario in story ' + storyID + '!', resp);
     }));
   }
-  /**
-   * Add the First Scenario
-   * @param storyID
-   * @returns
-   */
+  /**
+   * Add the First Scenario
+   * @param storyID
+   * @returns
+*/
   public addFirstScenario(storyID): Observable {// not used ?
     return this.http
-      .get(this.apiService.apiServer + '/mongo/scenario/add/' + storyID , ApiService.getOptions())// route doesn't exist
+      .get(this.apiService.apiServer + '/mongo/scenario/add/' + storyID, ApiService.getOptions())// route doesn't exist
       .pipe(tap(resp => {
         console.log('Add new scenario in story ' + storyID + '!', resp);
       }));
@@ -125,11 +128,24 @@ export class ScenarioService {
   * @param scenario
   * @returns
   */
-  deleteScenario(storyID: any, scenario: Scenario): Observable {
-    return this.http
-      .delete(this.apiService.apiServer + '/story/scenario/' + storyID + '/' + scenario.scenario_id, ApiService.getOptions())
-      .pipe(tap(() => {
-        //
-      }));
+  deleteScenario(storyID: any, scenario: Scenario, xrayEnabled: boolean): Observable {
+    if (xrayEnabled) {
+      console.log('Xray enabled no 2: ' + xrayEnabled);
+      const headers = new HttpHeaders()
+        .set('x-xray-enabled', xrayEnabled.toString())
+        .set('x-test-key', scenario.testKey.toString());
+      const options = { headers: headers, ...ApiService.getOptions() };
+      return this.http
+        .delete(this.apiService.apiServer + '/story/scenario/' + storyID + '/' + scenario.scenario_id, options)
+        .pipe(tap(() => {
+          //
+        }));
+    } else {
+      return this.http
+        .delete(this.apiService.apiServer + '/story/scenario/' + storyID + '/' + scenario.scenario_id, ApiService.getOptions())
+        .pipe(tap(() => {
+          //
+        }));
+    }
   }
 }
diff --git a/frontend/src/app/Services/story.service.ts b/frontend/src/app/Services/story.service.ts
index 8796cfa4c..33db5d11b 100644
--- a/frontend/src/app/Services/story.service.ts
+++ b/frontend/src/app/Services/story.service.ts
@@ -84,6 +84,16 @@ export class StoryService {
       //
     }));
   }
+
+  /**
+   * Gets a single Story by Issue Key
+   * @param issueKey
+   * @returns single Story object
+   */
+  public getStoryByIssueKey(issueKey: string): Observable {
+    return 
this.http.get(`${this.apiService.apiServer}/story/issueKey/${issueKey}`, ApiService.getOptions()); + } + /** * Creates a story * @param title @@ -238,4 +248,18 @@ export class StoryService { return window.open(s); } } + +// /** +// * Updates XRay status in Jira for given step and testrun +// */ +// updateXrayStatus(testRunId, stepId, status) { +// const data = { +// testRunId: testRunId, +// stepId: stepId, +// status: status +// }; +// return this.http +// .put(this.apiService.apiServer + '/jira/update-xray-status/', data, ApiService.getOptions()) +// .pipe(tap()); +// } } diff --git a/frontend/src/app/Services/xray.service.spec.ts b/frontend/src/app/Services/xray.service.spec.ts new file mode 100644 index 000000000..06951e715 --- /dev/null +++ b/frontend/src/app/Services/xray.service.spec.ts @@ -0,0 +1,16 @@ +import { TestBed } from '@angular/core/testing'; + +import { XrayService } from './xray.service'; + +describe('XrayService', () => { + let service: XrayService; + + beforeEach(() => { + TestBed.configureTestingModule({}); + service = TestBed.inject(XrayService); + }); + + it('should be created', () => { + expect(service).toBeTruthy(); + }); +}); diff --git a/frontend/src/app/Services/xray.service.ts b/frontend/src/app/Services/xray.service.ts new file mode 100644 index 000000000..d87b1c27c --- /dev/null +++ b/frontend/src/app/Services/xray.service.ts @@ -0,0 +1,86 @@ +import { Injectable } from '@angular/core'; +import { tap } from 'rxjs'; +import { ApiService } from '../Services/api.service'; +import { HttpClient } from '@angular/common/http'; +import { StoryService } from './story.service'; +import { Scenario } from '../model/Scenario'; + +@Injectable({ + providedIn: 'root' +}) +export class XrayService { + + constructor(public apiService: ApiService, private http: HttpClient, public storyService: StoryService) { } + + /** + * Updates the Xray status for a single scenario given the selected test executions. + * + * @param {Scenario} scenario - The scenario containing the test run steps. + * @param {number[]} selectedExecutions - List of selected test execution IDs. + * @param {string} status - The status to update (e.g., 'PASS', 'FAIL'). + * @returns {Promise} A promise that resolves when the status update is complete. + */ + async updateXrayStatus(scenario: Scenario, selectedExecutions: number[], status: string): Promise { + if (scenario.testRunSteps && scenario.testRunSteps.length > 0) { + for (const testRun of scenario.testRunSteps) { + if (selectedExecutions.includes(testRun.testRunId)) { + try { + await this.sendXrayStatus(testRun.testRunId, testRun.testRunStepId, status).toPromise(); + console.log('XRay update successful for TestRunStepId:', testRun.testRunStepId, " and Test Execution:", testRun.testExecKey); + } catch (error) { + console.error('Error while updating XRay status for TestRunStepId:', testRun.testRunStepId, error); + } + } + } + } + } + + /** + * Sends the Xray status to the backend given a specific test run and step. + * + * @param {number} testRunId - The ID of the test run. + * @param {number} stepId - The ID of the test run step. + * @param {string} status - The status to update (e.g., 'PASS', 'FAIL'). + * @returns {Observable} An observable that emits the result of the HTTP PUT request. 
+ */ + sendXrayStatus(testRunId: number, stepId: number, status: string) { + const data = { + testRunId: testRunId, + stepId: stepId, + status: status + }; + return this.http + .put(this.apiService.apiServer + '/jira/update-xray-status/', data, ApiService.getOptions()) + .pipe(tap()); + } + + /** + * Retrieves the stories for each precondition. + * + * @param {any[]} preConditions - An array of preconditions, each containing test sets. + * @returns {any[]} An array of objects representing the results, including precondition details and associated stories. + */ + getPreconditionStories(preConditions: any[]): any[] { + let preConditionResults = []; + for (const precondition of preConditions) { + + const testSetPromises = precondition.testSet.map(testKey => + this.storyService.getStoryByIssueKey(testKey).toPromise() + ); + + Promise.all(testSetPromises) + .then(stories => { + const results = { + preConditionKey: precondition.preConditionKey, + preConditionName: precondition.preConditionName, + stories: stories + }; + preConditionResults.push(results); + }) + .catch(error => { + console.error('Failed to fetch stories for a test set:', error); + }); + } + return preConditionResults; + } +} diff --git a/frontend/src/app/app.module.ts b/frontend/src/app/app.module.ts index 13ab6e40d..23677573a 100644 --- a/frontend/src/app/app.module.ts +++ b/frontend/src/app/app.module.ts @@ -39,6 +39,7 @@ import { CarouselModule } from "ngx-owl-carousel-o"; import { ResetPasswordComponent } from "./reset-password/reset-password.component"; import { ConfirmResetPasswordComponent } from "./confirm-reset-password/confirm-reset-password.component"; import { DeleteToast } from "./delete-toast"; +import { XrayToast } from "./delete-toast-xray"; import { DEFAULT_TIMEOUT, TimeoutInterceptor, @@ -85,111 +86,119 @@ import { MatDialogModule } from '@angular/material/dialog'; import { MatMenuModule } from '@angular/material/menu'; import { WindowSizeComponent } from './modals/window-size/window-size.component'; import { FileExplorerModalComponent } from "./modals/file-explorer-modal/file-explorer-modal.component"; +import { ExecutionListComponent } from './modals/execution-list/execution-list.component'; import { FileManagerComponent } from "./file-manager/file-manager.component"; import {MatCheckboxModule} from '@angular/material/checkbox'; -@NgModule({ declarations: [ - AppComponent, - ScenarioEditorComponent, - StoriesBarComponent, - ParentComponent, - LoginComponent, - ExampleTableComponent, - ViewModeDirective, - EditModeDirective, - EditableComponent, - FocusableDirective, - EditableOnEnterDirective, - FeedbackComponent, - TermsComponent, - AccountManagementComponent, - StoryEditorComponent, - RegistrationComponent, - RegistrationComponent, - PasswordConfirmedValidatorDirective, - ReportComponent, - InfoWarningToast, - ResetPasswordComponent, - ConfirmResetPasswordComponent, - ReportHistoryComponent, - LayoutModalComponent, - CreateNewGroupComponent, - CreateCustomProjectComponent, - DisconnectJiraAccountComponent, - DeleteAccountComponent, - AddBlockFormComponent, - SaveBlockFormComponent, - NewStepRequestComponent, - RenameScenarioComponent, - RenameStoryComponent, - WorkgroupEditComponent, - CreateNewStoryComponent, - UpdateGroupComponent, - ChangeJiraAccountComponent, - RepoSwichComponent, - CreateScenarioComponent, - EditBlockComponent, - ResizeInputDirective, - RenameBackgroundComponent, - BaseEditorComponent, - NewExampleComponent, - ExampleComponent, - DeleteToast, - TransferOwnershipToast, - 
-    ImportModalComponent,
-    ConfirmResetPasswordPopupComponent,
-    WindowSizeComponent,
-    FileExplorerModalComponent,
-    FileManagerComponent,
+@NgModule({
+  declarations: [
+    AppComponent,
+    ScenarioEditorComponent,
+    StoriesBarComponent,
+    ParentComponent,
+    LoginComponent,
+    ExampleTableComponent,
+    ViewModeDirective,
+    EditModeDirective,
+    EditableComponent,
+    FocusableDirective,
+    EditableOnEnterDirective,
+    FeedbackComponent,
+    TermsComponent,
+    AccountManagementComponent,
+    StoryEditorComponent,
+    RegistrationComponent,
+    PasswordConfirmedValidatorDirective,
+    ReportComponent,
+    InfoWarningToast,
+    ResetPasswordComponent,
+    ConfirmResetPasswordComponent,
+    ReportHistoryComponent,
+    LayoutModalComponent,
+    CreateNewGroupComponent,
+    CreateCustomProjectComponent,
+    DisconnectJiraAccountComponent,
+    DeleteAccountComponent,
+    AddBlockFormComponent,
+    SaveBlockFormComponent,
+    NewStepRequestComponent,
+    RenameScenarioComponent,
+    RenameStoryComponent,
+    WorkgroupEditComponent,
+    CreateNewStoryComponent,
+    UpdateGroupComponent,
+    ChangeJiraAccountComponent,
+    RepoSwichComponent,
+    CreateScenarioComponent,
+    EditBlockComponent,
+    ResizeInputDirective,
+    RenameBackgroundComponent,
+    BaseEditorComponent,
+    NewExampleComponent,
+    ExampleComponent,
+    DeleteToast,
+    XrayToast,
+    TransferOwnershipToast,
+    ImportModalComponent,
+    ConfirmResetPasswordPopupComponent,
+    WindowSizeComponent,
+    FileExplorerModalComponent,
+    ExecutionListComponent,
+    FileManagerComponent,
+  ],
+  imports: [
+    NgbModule,
+    BrowserModule,
+    BrowserAnimationsModule,
+    FormsModule,
+    ReactiveFormsModule,
+    MatTableModule,
+    MatListModule,
+    MatSelectModule,
+    RouterModule.forRoot(ROUTES),
+    FormsModule,
+    ClipboardModule,
+    DragDropModule,
+    MatProgressSpinnerModule,
+    CarouselModule,
+    LoggerModule.forRoot({
+      serverLoggingUrl: localStorage.getItem("url_backend") + "/user/log",
+      level: NgxLoggerLevel.DEBUG,
+      serverLogLevel: NgxLoggerLevel.DEBUG,
+    }),
+    ToastrModule.forRoot({
+      timeOut: 3000,
+    }),
+    MatSlideToggleModule,
+    MatIconModule,
+    MatExpansionModule,
+    MatTabsModule,
+    MatDialogModule,
+    MatTooltipModule,
+    MatDialogModule,
+    MatFormFieldModule,
+    MatInputModule,
+    MatMenuModule,
+    MatCheckboxModule
+  ],
+  providers: [
+    ApiService,
+    AuthGuard,
+    CookieService,
+    [{ provide: HTTP_INTERCEPTORS, useClass: TimeoutInterceptor, multi: true }],
+    [
+      {
+        provide: HTTP_INTERCEPTORS,
+        useClass: HttpLoggerInterceptor,
+        multi: true,
+      },
     ],
-  bootstrap: [AppComponent],
-  schemas: [CUSTOM_ELEMENTS_SCHEMA], imports: [NgbModule,
-    BrowserModule,
-    BrowserAnimationsModule,
-    FormsModule,
-    ReactiveFormsModule,
-    MatTableModule,
-    MatListModule,
-    MatSelectModule,
-    RouterModule.forRoot(ROUTES),
-    FormsModule,
-    ClipboardModule,
-    DragDropModule,
-    MatProgressSpinnerModule,
-    CarouselModule,
-    LoggerModule.forRoot({
-      serverLoggingUrl: localStorage.getItem("url_backend") + "/user/log",
-      level: NgxLoggerLevel.DEBUG,
-      serverLogLevel: NgxLoggerLevel.DEBUG,
-    }),
-    ToastrModule.forRoot({
-      timeOut: 3000,
-    }),
-    MatSlideToggleModule,
-    MatIconModule,
-    MatExpansionModule,
-    MatTabsModule,
-    MatDialogModule,
-    MatTooltipModule,
-    MatDialogModule,
-    MatFormFieldModule,
-    MatInputModule,
-    MatMenuModule,
-    MatCheckboxModule], providers: [
-    ApiService,
-    AuthGuard,
-    CookieService,
-    [{ provide: HTTP_INTERCEPTORS, useClass: TimeoutInterceptor, multi: true }],
-    [
-      {
-        provide: HTTP_INTERCEPTORS,
-        useClass: HttpLoggerInterceptor,
-        multi: true,
-      },
-    ],
-    [{ provide: DEFAULT_TIMEOUT, useValue: 120000 }],
-    ThemingService,
-    provideHttpClient(withInterceptorsFromDi()),
-  ] })
+    [{ provide: DEFAULT_TIMEOUT, useValue: 120000 }],
+    ThemingService,
+    provideHttpClient(withInterceptorsFromDi()),
+  ],
+  bootstrap: [AppComponent],
+  schemas: [CUSTOM_ELEMENTS_SCHEMA],
+})
 export class AppModule {}
diff --git a/frontend/src/app/base-editor/base-editor.component.css b/frontend/src/app/base-editor/base-editor.component.css
index 69fd60f0a..58f75910c 100644
--- a/frontend/src/app/base-editor/base-editor.component.css
+++ b/frontend/src/app/base-editor/base-editor.component.css
@@ -505,6 +505,40 @@ input.background {
   filter: invert(54%) brightness(92%);
 }
 
+.origin-container {
+  margin-left: 10px;
+  padding: 5px 5px;
+  border: 3px solid var(--ocean-blue);
+  font-size: 12px;
+  border-radius: 10px;
+  display: inline-block;
+  background-color: #f9f9f9;
+  font-style: italic;
+  box-shadow: 0 2px 5px rgba(0,0,0,0.1);
+}
+
+.xray-button {
+  display: flex;
+  align-items: center;
+  padding: 10px 10px;
+  border-radius: 5px;
+  cursor: pointer;
+  font-size: 13px;
+  font-weight: bold;
+  font-style: italic;
+  color: var(--ocean-blue);
+  margin-right: 10px;
+}
+
+.xray-button img {
+  height: 25px;
+}
+
+.xray-button span {
+  margin-right: 5px;
+}
+
+
 @media screen and (max-height: 700px) {
   ul.stepsList{
     height: 250px;
diff --git a/frontend/src/app/base-editor/base-editor.component.html b/frontend/src/app/base-editor/base-editor.component.html
index 71a31e610..dc6370558 100644
--- a/frontend/src/app/base-editor/base-editor.component.html
+++ b/frontend/src/app/base-editor/base-editor.component.html
@@ -3,82 +3,88 @@
[Extraction note: the remaining template hunks lost their HTML markup, so only text fragments survive.
In frontend/src/app/base-editor/base-editor.component.html (hunks @@ -3,82 +3,88 @@, @@ -237,7 +243,7 @@ around {{currentStep.post}}, and @@ -273,6 +279,9 @@) the recoverable additions are an Xray status button (presumably styled by the new .xray-button rules) and an origin badge rendering {{ currentStep.origin || 'Unknown Origin' }} (presumably styled by .origin-container).
A further template hunk (@@ -157,171 +222,188 @@, file header lost) adds a "Pre-Conditions" section that lists each {{ precondition.preConditionKey }}: {{ precondition.preConditionName }} together with a table of its linked stories (columns "CUC Key" showing {{ story.issue_number }}, "Name" showing {{ story.title }}, and a "Seed-Story" link rendered as {{ story.issue_number }}. {{ story.title }}); the existing "You are not authorized to use this project" notice is kept.]
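Since the template hunks above are not recoverable, the following is a minimal TypeScript sketch of how a component might drive the new XrayService. The component name, selector, field names, and import paths are illustrative assumptions and are not part of this diff; only the XrayService method signatures come from the code above.

// Hypothetical consumer of the new XrayService (all names here are illustrative).
import { Component, Input } from '@angular/core';
import { XrayService } from '../Services/xray.service'; // path is an assumption
import { Scenario } from '../model/Scenario';            // path mirrors the service's own import

@Component({
  selector: 'app-xray-status-example',
  template: ''
})
export class XrayStatusExampleComponent {
  @Input() scenario: Scenario;

  // Test execution IDs the user ticked, e.g. in the execution-list modal (assumed flow).
  selectedExecutions: number[] = [];

  constructor(private xrayService: XrayService) {}

  // Report a PASS/FAIL result for every selected execution of this scenario;
  // per-step errors are caught and logged inside XrayService.updateXrayStatus.
  async reportResult(passed: boolean): Promise<void> {
    const status = passed ? 'PASS' : 'FAIL';
    await this.xrayService.updateXrayStatus(this.scenario, this.selectedExecutions, status);
  }

  // Resolve the stories behind each precondition for a "Pre-Conditions" panel.
  // Note: the returned array is populated asynchronously as the story lookups resolve.
  loadPreconditions(preConditions: any[]): any[] {
    return this.xrayService.getPreconditionStories(preConditions);
  }
}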