diff --git a/.github/workflows/regenerate-meetings-and-videos.yml b/.github/workflows/regenerate-meetings-and-videos.yml
index 72aeda4e9e6..34509995d06 100644
--- a/.github/workflows/regenerate-meetings-and-videos.yml
+++ b/.github/workflows/regenerate-meetings-and-videos.yml
@@ -1,6 +1,6 @@
 name: List everyday latest list of AsyncAPI Meetings, Newsroom Videos and Dashboard data.
 
-on: 
+on:
   workflow_dispatch:
   schedule:
     #every day at midnight
@@ -23,7 +23,7 @@ jobs:
       - name: Check package-lock version
         uses: asyncapi/.github/.github/actions/get-node-version-from-package-lock@master
         id: lockversion
-      
+
       - name: Use Node.js
         uses: actions/setup-node@v3
         with:
@@ -45,7 +45,7 @@ jobs:
          committer: asyncapi-bot
          author: asyncapi-bot
          title: 'chore: update meetings.json and newsrooom_videos.json'
-         branch: update-meetings/${{ github.job }}
+         branch: update-meetings/${{ github.sha }}
      - if: failure() # Only, on failure, send a message on the 94_bot-failing-ci slack channel
        name: Report workflow run status to Slack
        uses: 8398a7/action-slack@fbd6aa58ba854a740e11a35d0df80cb5d12101d8 #using https://github.com/8398a7/action-slack/releases/tag/v3.15.1
@@ -54,4 +54,4 @@ jobs:
          fields: repo,action,workflow
          text: 'AsyncAPI Meetings and Videos workflow failed'
        env:
-         SLACK_WEBHOOK_URL: ${{ secrets.SLACK_CI_FAIL_NOTIFY }}
\ No newline at end of file
+         SLACK_WEBHOOK_URL: ${{ secrets.SLACK_CI_FAIL_NOTIFY }}
diff --git a/components/newsroom/Newsroom.tsx b/components/newsroom/Newsroom.tsx
index 87b8bdff180..a88c4e2f14c 100644
--- a/components/newsroom/Newsroom.tsx
+++ b/components/newsroom/Newsroom.tsx
@@ -1,5 +1,4 @@
 import React from 'react';
-import { TwitterTimelineEmbed } from 'react-twitter-embed';
 
 import { HeadingLevel, HeadingTypeStyle } from '@/types/typography/Heading';
 import { ParagraphTypeStyle } from '@/types/typography/Paragraph';
@@ -69,19 +68,14 @@ export default function Newsroom() {
[hunk body not recoverable: the JSX markup on these lines was stripped during extraction; consistent with the removed react-twitter-embed import and the hunk shrinking from 19 to 14 lines, the change deletes the TwitterTimelineEmbed block and its wrapper elements from the Newsroom layout]
diff --git a/scripts/dashboard/build-dashboard.js b/scripts/dashboard/build-dashboard.js index 49fbf8b5d4f..120109a05c2 100644 --- a/scripts/dashboard/build-dashboard.js +++ b/scripts/dashboard/build-dashboard.js @@ -1,34 +1,90 @@ const { writeFileSync } = require('fs'); const { resolve } = require('path'); const { graphql } = require('@octokit/graphql'); -const { Promise } = require('node-fetch'); const { Queries } = require('./issue-queries'); -async function getHotDiscussions(discussions) { - const result = await Promise.all( - discussions.map(async (discussion) => { +/** + * Introduces a delay in the execution flow. + * @param {number} ms - The number of milliseconds to pause. + * @returns {Promise} A promise that resolves after the specified delay. + */ +async function pause(ms) { + return new Promise((res) => { + setTimeout(res, ms); + }); +} + +async function getDiscussions(query, pageSize, endCursor = null) { + try { + const result = await graphql(query, { + first: pageSize, + after: endCursor, + headers: { + authorization: `token ${process.env.GITHUB_TOKEN}` + } + }); + + if (result.rateLimit.remaining <= 100) { + console.log( + `[WARNING] GitHub GraphQL rateLimit`, + `cost = ${result.rateLimit.cost}`, + `limit = ${result.rateLimit.limit}`, + `remaining = ${result.rateLimit.remaining}`, + `resetAt = ${result.rateLimit.resetAt}` + ); + } + + await pause(500); + + const { hasNextPage } = result.search.pageInfo; + + if (!hasNextPage) { + return result.search.nodes; + } + return result.search.nodes.concat(await getDiscussions(query, pageSize, result.search.pageInfo.endCursor)); + } catch (e) { + console.error(e); + + return Promise.reject(e); + } +} +async function getDiscussionByID(isPR, id) { + try { + const result = await graphql(isPR ? Queries.pullRequestById : Queries.issueById, { + id, + headers: { + authorization: `token ${process.env.GITHUB_TOKEN}` + } + }); + + return result; + } catch (e) { + console.error(e); + + return Promise.reject(e); + } +} + +async function processHotDiscussions(batch) { + return Promise.all( + batch.map(async (discussion) => { try { + // eslint-disable-next-line no-underscore-dangle const isPR = discussion.__typename === 'PullRequest'; if (discussion.comments.pageInfo.hasNextPage) { - let fetchedDiscussion = await getDiscussionByID(isPR, discussion.id); + const fetchedDiscussion = await getDiscussionByID(isPR, discussion.id); discussion = fetchedDiscussion.node; } const interactionsCount = discussion.reactions.totalCount + discussion.comments.totalCount + - discussion.comments.nodes.reduce( - (acc, curr) => acc + curr.reactions.totalCount, - 0 - ); + discussion.comments.nodes.reduce((acc, curr) => acc + curr.reactions.totalCount, 0); const finalInteractionsCount = isPR ? interactionsCount + - discussion.reviews.totalCount + - discussion.reviews.nodes.reduce( - (acc, curr) => acc + curr.comments.totalCount, - 0 - ) + discussion.reviews.totalCount + + discussion.reviews.nodes.reduce((acc, curr) => acc + curr.comments.totalCount, 0) : interactionsCount; return { id: discussion.id, @@ -37,31 +93,38 @@ async function getHotDiscussions(discussions) { title: discussion.title, author: discussion.author ? discussion.author.login : '', resourcePath: discussion.resourcePath, - repo: 'asyncapi/' + discussion.repository.name, + repo: `asyncapi/${discussion.repository.name}`, labels: discussion.labels ? 
discussion.labels.nodes : [], - score: - finalInteractionsCount / - Math.pow(monthsSince(discussion.timelineItems.updatedAt) + 2, 1.8), + score: finalInteractionsCount / (monthsSince(discussion.timelineItems.updatedAt) + 2) ** 1.8 }; } catch (e) { - console.error( - `there was some issues while parsing this item: ${JSON.stringify( - discussion - )}` - ); + console.error(`there was some issues while parsing this item: ${JSON.stringify(discussion)}`); throw e; } }) ); +} + +async function getHotDiscussions(discussions) { + const result = []; + const batchSize = 5; + + for (let i = 0; i < discussions.length; i += batchSize) { + const batch = discussions.slice(i, i + batchSize); + // eslint-disable-next-line no-await-in-loop + const batchResults = await processHotDiscussions(batch); + + // eslint-disable-next-line no-await-in-loop + await pause(1000); + + result.push(...batchResults); + } result.sort((ElemA, ElemB) => ElemB.score - ElemA.score); - const filteredResult = result.filter(issue => issue.author !== 'asyncapi-bot'); + const filteredResult = result.filter((issue) => issue.author !== 'asyncapi-bot'); return filteredResult.slice(0, 12); } async function writeToFile(content) { - writeFileSync( - resolve(__dirname, '..', '..', 'dashboard.json'), - JSON.stringify(content, null, ' ') - ); + writeFileSync(resolve(__dirname, '..', '..', 'dashboard.json'), JSON.stringify(content, null, ' ')); } async function mapGoodFirstIssues(issues) { return issues.map((issue) => ({ @@ -69,25 +132,20 @@ async function mapGoodFirstIssues(issues) { title: issue.title, isAssigned: !!issue.assignees.totalCount, resourcePath: issue.resourcePath, - repo: 'asyncapi/' + issue.repository.name, + repo: `asyncapi/${issue.repository.name}`, author: issue.author.login, area: getLabel(issue, 'area/') || 'Unknown', labels: issue.labels.nodes.filter( - (label) => - !label.name.startsWith('area/') && - !label.name.startsWith('good first issue') - ), + (label) => !label.name.startsWith('area/') && !label.name.startsWith('good first issue') + ) })); } function getLabel(issue, filter) { - const result = issue.labels.nodes.find((label) => - label.name.startsWith(filter) - ); - return result && result.name.split('/')[1]; + const result = issue.labels.nodes.find((label) => label.name.startsWith(filter)); + return result?.name.split('/')[1]; } - function monthsSince(date) { const seconds = Math.floor((new Date() - new Date(date)) / 1000); // 2592000 = number of seconds in a month = 30 * 24 * 60 * 60 @@ -95,72 +153,24 @@ function monthsSince(date) { return Math.floor(months); } -async function getDiscussions(query, pageSize, endCursor = null) { - try { - let result = await graphql(query, { - first: pageSize, - after: endCursor, - headers: { - authorization: `token ${process.env.GITHUB_TOKEN}`, - }, - }); - - if (result.rateLimit.remaining <= 100) { - console.log( - `[WARNING] GitHub GraphQL rateLimit`, - `cost = ${result.rateLimit.cost}`, - `limit = ${result.rateLimit.limit}`, - `remaining = ${result.rateLimit.remaining}`, - `resetAt = ${result.rateLimit.resetAt}` - ) - } - - const hasNextPage = result.search.pageInfo.hasNextPage; - - if (!hasNextPage) { - return result.search.nodes; - } else { - return result.search.nodes.concat( - await getDiscussions(query, pageSize, result.search.pageInfo.endCursor) - ); - } - } catch (e) { - console.error(e); - } -} -async function getDiscussionByID(isPR, id) { - try { - let result = await graphql(isPR ? 
Queries.pullRequestById : Queries.issueById, { - id, - headers: { - authorization: `token ${process.env.GITHUB_TOKEN}`, - }, - - } - ); - return result; - } catch (e) { - console.error(e); - } -} async function start() { try { const [issues, PRs, rawGoodFirstIssues] = await Promise.all([ getDiscussions(Queries.hotDiscussionsIssues, 20), getDiscussions(Queries.hotDiscussionsPullRequests, 20), - getDiscussions(Queries.goodFirstIssues, 20), + getDiscussions(Queries.goodFirstIssues, 20) ]); const discussions = issues.concat(PRs); const [hotDiscussions, goodFirstIssues] = await Promise.all([ getHotDiscussions(discussions), - mapGoodFirstIssues(rawGoodFirstIssues), + mapGoodFirstIssues(rawGoodFirstIssues) ]); writeToFile({ hotDiscussions, goodFirstIssues }); } catch (e) { - console.log('There were some issues parsing data from github.') + console.log('There were some issues parsing data from github.'); console.log(e); } } start(); -module.exports = { getLabel, monthsSince, mapGoodFirstIssues, getHotDiscussions, getDiscussionByID } \ No newline at end of file +module.exports = { getLabel, monthsSince, mapGoodFirstIssues, getHotDiscussions, getDiscussionByID };
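
The new build-dashboard.js logic above throttles GitHub GraphQL usage by processing discussions in batches of five with a one-second pause between batches, and it rewrites the hot-discussion score as interactions / (monthsSince(updatedAt) + 2) ** 1.8. The sketch below re-implements that batch-and-pause pattern and the score calculation as a standalone JavaScript example; it is illustrative only and not part of the patch, and fetchInteractions plus the sample items are hypothetical stand-ins for the real GraphQL calls and discussion nodes.

```js
// Illustrative sketch only (not part of the patch). It mirrors the
// batch-and-pause pattern and the scoring formula introduced in
// build-dashboard.js; fetchInteractions and sampleItems are hypothetical.

const BATCH_SIZE = 5; // same batch size as getHotDiscussions
const BATCH_DELAY_MS = 1000; // same pause between batches

// Resolve after `ms` milliseconds, like the new pause() helper.
function pause(ms) {
  return new Promise((resolve) => {
    setTimeout(resolve, ms);
  });
}

// Hypothetical per-item work; in the real script this is a GraphQL request.
async function fetchInteractions(item) {
  await pause(50); // simulate network latency
  return { id: item.id, interactions: item.reactions + item.comments };
}

// Whole months elapsed since `date` (2592000 s = 30 days), as in the script.
function monthsSince(date) {
  const seconds = Math.floor((new Date() - new Date(date)) / 1000);
  return Math.floor(seconds / 2592000);
}

// Slice the list into batches, run each batch concurrently with Promise.all,
// then pause before starting the next batch so requests are not all in flight
// at once.
async function processAllInBatches(items) {
  const results = [];

  for (let i = 0; i < items.length; i += BATCH_SIZE) {
    const batch = items.slice(i, i + BATCH_SIZE);
    // eslint-disable-next-line no-await-in-loop
    const batchResults = await Promise.all(batch.map(fetchInteractions));

    // eslint-disable-next-line no-await-in-loop
    await pause(BATCH_DELAY_MS);

    results.push(...batchResults);
  }

  return results;
}

// Hypothetical sample data so the sketch runs end to end.
const sampleItems = [
  { id: 'a', reactions: 4, comments: 9, updatedAt: '2024-01-10' },
  { id: 'b', reactions: 1, comments: 2, updatedAt: '2024-03-05' }
];

processAllInBatches(sampleItems).then((results) => {
  // Same decay-weighted score as the diff:
  // score = interactions / (monthsSince(updatedAt) + 2) ** 1.8
  const scored = results.map((item, idx) => ({
    ...item,
    score: item.interactions / (monthsSince(sampleItems[idx].updatedAt) + 2) ** 1.8
  }));

  console.log(scored.sort((a, b) => b.score - a.score));
});
```

The pauses trade speed for predictability: each batch still runs its requests in parallel, but the gaps between batches keep the script from hammering the GraphQL API, which is presumably why the diff also logs a warning whenever rateLimit.remaining drops to 100 or below.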