Commit

Send blog posts from RSS feed into text channel
raccube committed Aug 6, 2024
1 parent 79ddc16 commit 3cdeca9
Showing 6 changed files with 85 additions and 2 deletions.
2 changes: 2 additions & 0 deletions requirements.in
@@ -1,2 +1,4 @@
 python-dotenv
 discord
+feedparser
+beautifulsoup4
8 changes: 8 additions & 0 deletions requirements.txt
@@ -14,10 +14,14 @@ async-timeout==4.0.3
     # via aiohttp
 attrs==24.1.0
     # via aiohttp
+beautifulsoup4==4.12.3
+    # via -r requirements.in
 discord==2.3.2
     # via -r requirements.in
 discord-py==2.4.0
     # via discord
+feedparser==6.0.11
+    # via -r requirements.in
 frozenlist==1.4.1
     # via
     #   aiohttp
@@ -30,5 +34,9 @@ multidict==6.0.5
     #   yarl
 python-dotenv==1.0.1
     # via -r requirements.in
+sgmllib3k==1.0.0
+    # via feedparser
+soupsieve==2.5
+    # via beautifulsoup4
 yarl==1.9.4
     # via aiohttp
2 changes: 1 addition & 1 deletion src/bot.py
@@ -4,7 +4,7 @@
 import logging
 
 from constants import *
-from typing import Tuple, AsyncGenerator
+from typing import Tuple, AsyncGenerator, Optional
 
 
 class BotClient(discord.Client):
4 changes: 4 additions & 0 deletions src/constants.py
@@ -17,3 +17,7 @@
 SPECIAL_ROLE = "unverified-volunteer"
 
 PASSWORDS_CHANNEL_NAME = "role-passwords"
+
+FEED_URL = "https://studentrobotics.org/feed.xml"
+FEED_CHANNEL_NAME = "blog"
+FEED_CHECK_INTERVAL = 10  # seconds
13 changes: 12 additions & 1 deletion src/main.py
@@ -1,9 +1,11 @@
+import asyncio
 import os
 import sys
 import logging
 from dotenv import load_dotenv
 
 from bot import BotClient
+from rss import post_check_timer
 
 logger = logging.getLogger('srbot')
 logger.setLevel(logging.INFO)
@@ -13,4 +15,13 @@
 
 load_dotenv()
 bot = BotClient(logger=logger)
-bot.run(os.getenv('DISCORD_TOKEN'))
+loop = asyncio.get_event_loop()
+
+try:
+    loop.create_task(post_check_timer(bot))
+    loop.run_until_complete(bot.start(os.getenv('DISCORD_TOKEN')))
+except KeyboardInterrupt:
+    loop.run_until_complete(bot.close())
+    # cancel all lingering tasks
+finally:
+    loop.close()
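
The startup above drives the event loop by hand with asyncio.get_event_loop(), which newer Python versions (3.10+) discourage when no loop is already running. A possible equivalent using asyncio.run and discord.py's async context manager — a sketch under those assumptions, not part of this commit:

import asyncio
import logging
import os

from dotenv import load_dotenv

from bot import BotClient
from rss import post_check_timer

logger = logging.getLogger('srbot')
logger.setLevel(logging.INFO)


async def main() -> None:
    load_dotenv()
    bot = BotClient(logger=logger)
    async with bot:
        # Run the feed poller alongside the client; post_check_timer waits
        # until the bot is ready before its first check.
        asyncio.create_task(post_check_timer(bot))
        await bot.start(os.getenv('DISCORD_TOKEN'))


asyncio.run(main())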
58 changes: 58 additions & 0 deletions src/rss.py
@@ -0,0 +1,58 @@
import asyncio
from typing import Optional

import discord
import feedparser
from bs4 import BeautifulSoup
from feedparser import FeedParserDict

from bot import BotClient
from constants import FEED_URL, FEED_CHECK_INTERVAL, FEED_CHANNEL_NAME


def get_feed_channel(bot: BotClient) -> Optional[discord.TextChannel]:
    # Find the text channel that blog posts are announced in.
    for channel in bot.get_all_channels():
        if channel.name == FEED_CHANNEL_NAME:
            return channel

    return None


async def get_last_blog_post(channel: discord.TextChannel) -> Optional[str]:
    # TODO: This doesn't work when the bot is restarted, store the URL instead
    last_message: Optional[discord.Message] = channel.last_message
    if last_message is not None and len(last_message.embeds) > 0:
        return last_message.embeds[0].url

    return None


async def check_posts(bot: BotClient):
    # Fetch the feed and announce the newest entry if it hasn't been sent yet.
    feed = feedparser.parse(FEED_URL)
    channel = get_feed_channel(bot)
    if channel is None or not feed.entries:
        return

    post = feed.entries[0]
    newest_post_url = post.link
    last_message_url = await get_last_blog_post(channel)
    if newest_post_url != last_message_url:
        await channel.send(embed=create_embed(post))


def create_embed(post: FeedParserDict) -> discord.Embed:
    # Use the first paragraph of the post body as the embed description.
    soup = BeautifulSoup(post.content[0].value, 'html.parser')

    embed = discord.Embed(
        title=post.title,
        type="article",
        url=post.link,
        description=soup.p.text,
    )

    # Not every entry carries a media thumbnail, so look it up defensively.
    thumbnails = post.get("media_thumbnail", [])
    if len(thumbnails) > 0:
        embed.set_image(url=thumbnails[0]['url'])

    return embed


async def post_check_timer(bot: BotClient):
    # Poll the feed on a fixed interval once the bot is connected.
    await bot.wait_until_ready()
    while True:
        await check_posts(bot)
        await asyncio.sleep(FEED_CHECK_INTERVAL)
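
The TODO in get_last_blog_post flags that reading channel.last_message does not survive a restart (and the cached last_message can be missing). One minimal way to persist the last announced URL to disk instead — the file name and helper functions here are illustrative assumptions, not part of this commit, and the sketch reuses get_feed_channel and create_embed from above:

from pathlib import Path
from typing import Optional

import feedparser

# Illustrative assumption: where the last announced post URL is stored.
LAST_POST_FILE = Path("last_post_url.txt")


def load_last_post_url() -> Optional[str]:
    # Return the URL recorded by a previous run, if any.
    if LAST_POST_FILE.exists():
        return LAST_POST_FILE.read_text().strip() or None
    return None


def save_last_post_url(url: str) -> None:
    # Record the most recently announced post so a restart does not re-send it.
    LAST_POST_FILE.write_text(url)


async def check_posts(bot: BotClient) -> None:
    feed = feedparser.parse(FEED_URL)
    channel = get_feed_channel(bot)
    if channel is None or not feed.entries:
        return

    post = feed.entries[0]
    if post.link != load_last_post_url():
        await channel.send(embed=create_embed(post))
        save_last_post_url(post.link)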
