fix: fixed dashboard scripts (#3304)
Co-authored-by: Ansh Goyal <[email protected]>
Co-authored-by: asyncapi-bot <[email protected]>
akshatnema and asyncapi-bot authored Oct 23, 2024
1 parent 7b70861 commit 2be7824
Showing 3 changed files with 106 additions and 102 deletions.
8 changes: 4 additions & 4 deletions .github/workflows/regenerate-meetings-and-videos.yml
@@ -1,6 +1,6 @@
name: List everyday latest list of AsyncAPI Meetings, Newsroom Videos and Dashboard data.

on:
on:
workflow_dispatch:
schedule:
#every day at midnight
@@ -23,7 +23,7 @@ jobs:
- name: Check package-lock version
uses: asyncapi/.github/.github/actions/get-node-version-from-package-lock@master
id: lockversion

- name: Use Node.js
uses: actions/setup-node@v3
with:
@@ -45,7 +45,7 @@ jobs:
committer: asyncapi-bot <[email protected]>
author: asyncapi-bot <[email protected]>
title: 'chore: update meetings.json and newsrooom_videos.json'
branch: update-meetings/${{ github.job }}
branch: update-meetings/${{ github.sha }}
- if: failure() # Only, on failure, send a message on the 94_bot-failing-ci slack channel
name: Report workflow run status to Slack
uses: 8398a7/action-slack@fbd6aa58ba854a740e11a35d0df80cb5d12101d8 #using https://github.com/8398a7/action-slack/releases/tag/v3.15.1
@@ -54,4 +54,4 @@ jobs:
fields: repo,action,workflow
text: 'AsyncAPI Meetings and Videos workflow failed'
env:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_CI_FAIL_NOTIFY }}
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_CI_FAIL_NOTIFY }}
10 changes: 2 additions & 8 deletions components/newsroom/Newsroom.tsx
@@ -1,5 +1,4 @@
import React from 'react';
import { TwitterTimelineEmbed } from 'react-twitter-embed';

import { HeadingLevel, HeadingTypeStyle } from '@/types/typography/Heading';
import { ParagraphTypeStyle } from '@/types/typography/Paragraph';
@@ -69,19 +68,14 @@ export default function Newsroom() {
</div>
</div>

<div className='w-full flex-row items-stretch justify-between md:flex lg:w-3/4'>
<div className='w-full flex-row items-stretch justify-between md:flex md:h-120 lg:w-3/4'>
<div className='relative flex w-full flex-col overflow-y-auto md:w-1/2'>
<div className='min-h-0'>
<div className='md:t-0 md:b-0 md:l-0 md:r-0 size-full max-h-120 md:absolute'>
<div className='md:t-0 md:b-0 md:l-0 md:r-0 size-full md:absolute'>
<NewsroomArticle />
</div>
</div>
</div>
<div className='w-full px-2 md:w-1/2 md:pl-4 md:pr-0'>
<div className='mx-auto mt-8 w-full rounded-xl shadow-md md:mt-0' data-testid='Newsroom-Twitter'>
<TwitterTimelineEmbed sourceType='profile' screenName='AsyncAPISpec' options={{ tweetLimit: '2' }} />
</div>
</div>
</div>
</div>

190 changes: 100 additions & 90 deletions scripts/dashboard/build-dashboard.js
@@ -1,34 +1,90 @@
const { writeFileSync } = require('fs');
const { resolve } = require('path');
const { graphql } = require('@octokit/graphql');
const { Promise } = require('node-fetch');
const { Queries } = require('./issue-queries');

async function getHotDiscussions(discussions) {
const result = await Promise.all(
discussions.map(async (discussion) => {
/**
* Introduces a delay in the execution flow.
* @param {number} ms - The number of milliseconds to pause.
* @returns {Promise<void>} A promise that resolves after the specified delay.
*/
async function pause(ms) {
return new Promise((res) => {
setTimeout(res, ms);
});
}

async function getDiscussions(query, pageSize, endCursor = null) {
try {
const result = await graphql(query, {
first: pageSize,
after: endCursor,
headers: {
authorization: `token ${process.env.GITHUB_TOKEN}`
}
});

if (result.rateLimit.remaining <= 100) {
console.log(
`[WARNING] GitHub GraphQL rateLimit`,
`cost = ${result.rateLimit.cost}`,
`limit = ${result.rateLimit.limit}`,
`remaining = ${result.rateLimit.remaining}`,
`resetAt = ${result.rateLimit.resetAt}`
);
}

await pause(500);

const { hasNextPage } = result.search.pageInfo;

if (!hasNextPage) {
return result.search.nodes;
}
return result.search.nodes.concat(await getDiscussions(query, pageSize, result.search.pageInfo.endCursor));
} catch (e) {
console.error(e);

return Promise.reject(e);
}
}
async function getDiscussionByID(isPR, id) {
try {
const result = await graphql(isPR ? Queries.pullRequestById : Queries.issueById, {
id,
headers: {
authorization: `token ${process.env.GITHUB_TOKEN}`
}
});

return result;
} catch (e) {
console.error(e);

return Promise.reject(e);
}
}

async function processHotDiscussions(batch) {
return Promise.all(
batch.map(async (discussion) => {
try {
// eslint-disable-next-line no-underscore-dangle
const isPR = discussion.__typename === 'PullRequest';
if (discussion.comments.pageInfo.hasNextPage) {
let fetchedDiscussion = await getDiscussionByID(isPR, discussion.id);
const fetchedDiscussion = await getDiscussionByID(isPR, discussion.id);
discussion = fetchedDiscussion.node;
}

const interactionsCount =
discussion.reactions.totalCount +
discussion.comments.totalCount +
discussion.comments.nodes.reduce(
(acc, curr) => acc + curr.reactions.totalCount,
0
);
discussion.comments.nodes.reduce((acc, curr) => acc + curr.reactions.totalCount, 0);

const finalInteractionsCount = isPR
? interactionsCount +
discussion.reviews.totalCount +
discussion.reviews.nodes.reduce(
(acc, curr) => acc + curr.comments.totalCount,
0
)
discussion.reviews.totalCount +
discussion.reviews.nodes.reduce((acc, curr) => acc + curr.comments.totalCount, 0)
: interactionsCount;
return {
id: discussion.id,
@@ -37,130 +93,84 @@ async function getHotDiscussions(discussions) {
title: discussion.title,
author: discussion.author ? discussion.author.login : '',
resourcePath: discussion.resourcePath,
repo: 'asyncapi/' + discussion.repository.name,
repo: `asyncapi/${discussion.repository.name}`,
labels: discussion.labels ? discussion.labels.nodes : [],
score:
finalInteractionsCount /
Math.pow(monthsSince(discussion.timelineItems.updatedAt) + 2, 1.8),
score: finalInteractionsCount / (monthsSince(discussion.timelineItems.updatedAt) + 2) ** 1.8
};
} catch (e) {
console.error(
`there was some issues while parsing this item: ${JSON.stringify(
discussion
)}`
);
console.error(`there was some issues while parsing this item: ${JSON.stringify(discussion)}`);
throw e;
}
})
);
}

async function getHotDiscussions(discussions) {
const result = [];
const batchSize = 5;

for (let i = 0; i < discussions.length; i += batchSize) {
const batch = discussions.slice(i, i + batchSize);
// eslint-disable-next-line no-await-in-loop
const batchResults = await processHotDiscussions(batch);

// eslint-disable-next-line no-await-in-loop
await pause(1000);

result.push(...batchResults);
}
result.sort((ElemA, ElemB) => ElemB.score - ElemA.score);
const filteredResult = result.filter(issue => issue.author !== 'asyncapi-bot');
const filteredResult = result.filter((issue) => issue.author !== 'asyncapi-bot');
return filteredResult.slice(0, 12);
}
async function writeToFile(content) {
writeFileSync(
resolve(__dirname, '..', '..', 'dashboard.json'),
JSON.stringify(content, null, ' ')
);
writeFileSync(resolve(__dirname, '..', '..', 'dashboard.json'), JSON.stringify(content, null, ' '));
}
async function mapGoodFirstIssues(issues) {
return issues.map((issue) => ({
id: issue.id,
title: issue.title,
isAssigned: !!issue.assignees.totalCount,
resourcePath: issue.resourcePath,
repo: 'asyncapi/' + issue.repository.name,
repo: `asyncapi/${issue.repository.name}`,
author: issue.author.login,
area: getLabel(issue, 'area/') || 'Unknown',
labels: issue.labels.nodes.filter(
(label) =>
!label.name.startsWith('area/') &&
!label.name.startsWith('good first issue')
),
(label) => !label.name.startsWith('area/') && !label.name.startsWith('good first issue')
)
}));
}

function getLabel(issue, filter) {
const result = issue.labels.nodes.find((label) =>
label.name.startsWith(filter)
);
return result && result.name.split('/')[1];
const result = issue.labels.nodes.find((label) => label.name.startsWith(filter));
return result?.name.split('/')[1];
}


function monthsSince(date) {
const seconds = Math.floor((new Date() - new Date(date)) / 1000);
// 2592000 = number of seconds in a month = 30 * 24 * 60 * 60
const months = seconds / 2592000;
return Math.floor(months);
}

async function getDiscussions(query, pageSize, endCursor = null) {
try {
let result = await graphql(query, {
first: pageSize,
after: endCursor,
headers: {
authorization: `token ${process.env.GITHUB_TOKEN}`,
},
});

if (result.rateLimit.remaining <= 100) {
console.log(
`[WARNING] GitHub GraphQL rateLimit`,
`cost = ${result.rateLimit.cost}`,
`limit = ${result.rateLimit.limit}`,
`remaining = ${result.rateLimit.remaining}`,
`resetAt = ${result.rateLimit.resetAt}`
)
}

const hasNextPage = result.search.pageInfo.hasNextPage;

if (!hasNextPage) {
return result.search.nodes;
} else {
return result.search.nodes.concat(
await getDiscussions(query, pageSize, result.search.pageInfo.endCursor)
);
}
} catch (e) {
console.error(e);
}
}
async function getDiscussionByID(isPR, id) {
try {
let result = await graphql(isPR ? Queries.pullRequestById : Queries.issueById, {
id,
headers: {
authorization: `token ${process.env.GITHUB_TOKEN}`,
},

}
);
return result;
} catch (e) {
console.error(e);
}
}
async function start() {
try {
const [issues, PRs, rawGoodFirstIssues] = await Promise.all([
getDiscussions(Queries.hotDiscussionsIssues, 20),
getDiscussions(Queries.hotDiscussionsPullRequests, 20),
getDiscussions(Queries.goodFirstIssues, 20),
getDiscussions(Queries.goodFirstIssues, 20)
]);
const discussions = issues.concat(PRs);
const [hotDiscussions, goodFirstIssues] = await Promise.all([
getHotDiscussions(discussions),
mapGoodFirstIssues(rawGoodFirstIssues),
mapGoodFirstIssues(rawGoodFirstIssues)
]);
writeToFile({ hotDiscussions, goodFirstIssues });
} catch (e) {
console.log('There were some issues parsing data from github.')
console.log('There were some issues parsing data from github.');
console.log(e);
}
}
start();

module.exports = { getLabel, monthsSince, mapGoodFirstIssues, getHotDiscussions, getDiscussionByID }
module.exports = { getLabel, monthsSince, mapGoodFirstIssues, getHotDiscussions, getDiscussionByID };
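
The old script's `const { Promise } = require('node-fetch');` line likely shadowed the global `Promise` with `undefined`, since node-fetch does not export a `Promise` member; any later `Promise.all(...)` call would then throw. A small illustration of that shadowing hazard, using a hypothetical stand-in module rather than the real node-fetch import:

```js
// Shadowing hazard sketch: destructuring a name a module does not export
// yields undefined and hides the global of the same name in this scope.
const fakeModule = {}; // hypothetical stand-in for a module with no `Promise` export

const { Promise } = fakeModule; // Promise is now undefined here

try {
  Promise.all([]); // throws TypeError: cannot read 'all' of undefined
} catch (e) {
  console.error(e.message);
}
```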
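
The rewritten `getDiscussions` pages through GitHub's GraphQL search results by cursor, logging a warning when fewer than 100 rate-limit points remain and pausing 500 ms before recursing on `endCursor`. A minimal sketch of the same cursor-pagination pattern, with a hypothetical `fetchPage(cursor)` helper standing in for the real GraphQL call:

```js
// Cursor pagination with throttling; fetchPage(cursor) is a hypothetical helper
// that resolves to { nodes, pageInfo: { hasNextPage, endCursor } }.
async function collectAll(fetchPage, cursor = null, acc = []) {
  const page = await fetchPage(cursor);
  acc.push(...page.nodes);

  // Short pause between requests, mirroring the script's 500 ms delay.
  await new Promise((res) => { setTimeout(res, 500); });

  if (!page.pageInfo.hasNextPage) return acc;
  return collectAll(fetchPage, page.pageInfo.endCursor, acc);
}
```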
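
The score computed in `processHotDiscussions` is unchanged in substance, just rewritten with the exponent operator: total interactions divided by `(monthsSince(updatedAt) + 2) ** 1.8`, where `monthsSince` floors elapsed seconds over 2,592,000 (a 30-day month). A worked example with made-up numbers:

```js
// Worked example of the hot-discussion score with hypothetical counts.
const interactions = 42; // reactions + comments + comment reactions
const monthsOld = 3;     // monthsSince(updatedAt) for a ~90-day-old discussion

const score = interactions / (monthsOld + 2) ** 1.8;
console.log(score.toFixed(2)); // ≈ 2.32 — the same activity on a fresher item scores higher
```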
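
`getHotDiscussions` now walks the discussions in batches of five, waiting a second between batches instead of firing every request at once. The same batch-and-pause pattern in isolation, assuming a hypothetical `processOne` async worker:

```js
// Generic batch-and-pause loop; processOne is a hypothetical async worker.
async function processInBatches(items, processOne, batchSize = 5, delayMs = 1000) {
  const results = [];
  for (let i = 0; i < items.length; i += batchSize) {
    const batch = items.slice(i, i + batchSize);
    // Run one batch concurrently, then wait before starting the next.
    results.push(...(await Promise.all(batch.map(processOne))));
    await new Promise((res) => { setTimeout(res, delayMs); });
  }
  return results;
}
```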
