Skip to content

Commit

Permalink
Merge pull request #1 from EpicRandomGuy2/legendlore/dev
Browse files Browse the repository at this point in the history
Add do_not_post and name_change functions, filter out AI maps, add env var for test environment switching
  • Loading branch information
EpicRandomGuy2 authored Apr 3, 2024
2 parents 0c4c33d + f60e9fe commit df6ce0a
Show file tree
Hide file tree
Showing 4 changed files with 114 additions and 12 deletions.
4 changes: 3 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -2,4 +2,6 @@ examples
__pycache__
credentials.json
pushshift/*
Dockerfile
Dockerfile
do_not_post.py
name_change.py
17 changes: 14 additions & 3 deletions config.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,23 @@
import os

env = os.getenv("ENV") # Dev or Prod

# Constants
# Fill these with your own stuff
APP_NAME = "LegendLore"
APP_VERSION = "1.0"
CONNECTION_STRING = "mongodb://localhost:27017/"
DB_NAME = "MapTaggerReddit"
if env == "PROD":
CONNECTION_STRING = "mongodb://192.168.1.47:27017/"
DB_NAME = "MapTaggerReddit"
else:
CONNECTION_STRING = "mongodb://localhost:27017/"
DB_NAME = "MapTaggerReddit"
DEFAULT_SUBREDDIT = "all"
CREDENTIALS_FILE = "credentials.json"
NOTION_DB_ID = "95830a9189804ba29e9681e78b0236af"
if env == "PROD":
NOTION_DB_ID = "95830a9189804ba29e9681e78b0236af" # Prod Notion
else:
NOTION_DB_ID = "e7d05d2c6280444698b59fa79df3f78f" # Dev Notion
NOTION_DB_NAME = "LegendLore"
NUMBER_OF_DAYS_OLD = 7
UPDATE_SCORES_LIMIT = 250
Expand Down
32 changes: 26 additions & 6 deletions legend_lore.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import os
import argparse
import traceback
import mongodb_local
Expand All @@ -13,6 +14,7 @@
NUMBER_OF_DAYS_OLD,
UPDATE_SCORES_LIMIT,
)
from name_change import NAME_CHANGE


def parse_args():
Expand All @@ -32,22 +34,27 @@ def parse_args():
help="Name of subreddit to parse, e.g. 'battlemaps'",
) # Specify subreddit for parsing
parser.add_argument(
"-u",
"--update-scores",
action="store_true",
help="Set to update the scores of the most recent 500 posts",
) # Specify subreddit for parsing
parser.add_argument(
"--update-names",
action="store_true",
help="Set to update the scores of the most recent 500 posts",
) # Specify subreddit for parsing
# -h / --help exists by default and prints prog, description, epilog

args = parser.parse_args()

return [args.database, args.subreddit, args.update_scores]
return [args.database, args.subreddit, args.update_scores, args.update_names]


def main():

# Handle script arguments
db_name, subreddit_name, update_scores = parse_args()
db_name, subreddit_name, update_scores, update_names = parse_args()
env = os.getenv("ENV") # Dev or Prod

# To-do: Trigger script on new post to any of the subs

Expand Down Expand Up @@ -99,9 +106,10 @@ def main():
# Only use this to reset tags on a post (you probably don't want to do this, you'll have to pay to re-tag it)
# mongodb_local.reset_post_tags(post, subreddit="gpt_test")

# Analyzes post, and if it comes out untagged, second function tries to tag it by passing in a higher res image (costs ~1 cent per)
gpt4v_api.analyze_and_tag_post(post, append=False)
gpt4v_api.analyze_untagged_post(post, append=False)
if env == "PROD":
# Analyzes post, and if it comes out untagged, second function tries to tag it by passing in a higher res image (costs ~1 cent per)
gpt4v_api.analyze_and_tag_post(post, append=False)
gpt4v_api.analyze_untagged_post(post, append=False)

# After tagging, we need to update the post var for it to send to Notion
post = mongodb_local.get_post_from_db(post["title"]).iloc[0].to_dict()
Expand Down Expand Up @@ -139,6 +147,18 @@ def main():

count += 1

# If creator has requested a name change in LegendLore, hit the Notion API to update
# all instances of that name.

if update_names == True:
print(f"Changing {len(NAME_CHANGE)} names...")
# Just to keep track of script progress
count = 0
for name in NAME_CHANGE:
notion.send_updated_username_to_notion(name)
print(count)
count += 1


if __name__ == "__main__":
main()
73 changes: 71 additions & 2 deletions notion.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,8 @@
from mongodb_local import get_database_client, get_post_from_db, set_sent_to_notion
from pandas import DataFrame
from config import NOTION_DB_ID, CREDENTIALS_FILE
from do_not_post import DO_NOT_POST
from name_change import NAME_CHANGE
from pprint import pprint


Expand All @@ -37,6 +39,12 @@ def send_to_notion(
post, subreddit=subreddit, credentials=CREDENTIALS_FILE
)
return
# If the creator has requested to be excluded from LegendLore, return without posting
# set sent_to_notion flag so it won't attempt again in the future
elif name_in_do_not_post(post):
print(f"{post['title']} - {post['author']} in do_not_post, skipping...")
set_sent_to_notion(post, subreddit=subreddit)
return

# If dupe and no overwrite, skip this post
elif handle_duplicates(post, overwrite, subreddit=subreddit) == False:
Expand Down Expand Up @@ -98,10 +106,15 @@ def send_to_notion(
"children": [],
}

# Create children for each post - the embed and Reddit link
# If map is tagged [AI] in title, set sent_to_notion so it doesn't try again, and skip the actual send
if "[AI]" in post["title"].upper():
set_sent_to_notion(post, subreddit=subreddit)

print(f"{post['title']} is tagged [AI], skipping...")
break
# Create children for each post - the embed and Reddit link
# Different urls need to have embedding handled differently
if "i.redd.it" in post["url"]:
elif "i.redd.it" in post["url"]:
child = [
{
"object": "block",
Expand Down Expand Up @@ -344,6 +357,11 @@ def handle_duplicates(
return True


def name_in_do_not_post(post):
    """Return True when the post's author has opted out of being posted to LegendLore."""
    author = post["author"]
    return author in DO_NOT_POST


def send_updated_score_to_notion(
post,
subreddit=None,
Expand Down Expand Up @@ -388,3 +406,54 @@ def send_updated_score_to_notion(
)

# print(update_response.json())


def send_updated_username_to_notion(name, credentials=CREDENTIALS_FILE):
    """Rename a creator across all matching Notion pages.

    Queries the Notion database for every page whose "Creator" property
    equals *name* and patches each page's Creator to the replacement value
    looked up in NAME_CHANGE.

    Args:
        name: Old creator name; must be a key in NAME_CHANGE.
        credentials: Path to the JSON credentials file containing
            "notion_token".

    Raises:
        KeyError: If *name* is not present in NAME_CHANGE, or the
            credentials file lacks "notion_token".
    """
    print(f"Updating names for {name} -> {NAME_CHANGE[name]} in Notion...")

    # Keep the loaded dict in its own variable instead of rebinding the
    # `credentials` path parameter (the original shadowed it).
    with open(credentials) as credentials_json:
        creds = json.load(credentials_json)

    token = creds["notion_token"]

    headers = {
        "Authorization": "Bearer " + token,
        "Content-Type": "application/json",
        "Notion-Version": "2022-06-28",
    }

    # Query the database for pages whose Creator matches the old name.
    # NOTE(review): Notion query results are paginated (100 per page); pages
    # beyond the first batch are not renamed here — confirm whether any
    # creator has >100 maps before relying on this.
    notion_search_url = f"https://api.notion.com/v1/databases/{NOTION_DB_ID}/query"
    search_payload = {
        "filter": {
            "property": "Creator",
            "title": {"equals": name},
        }
    }

    search_response = requests.post(
        notion_search_url, json=search_payload, headers=headers
    )

    # print(search_response.json())

    # Update the Creator name on every page that matched the old name
    for page in search_response.json()["results"]:

        notion_page_url = f"https://api.notion.com/v1/pages/{page['id']}"

        update_payload = {
            "properties": {
                "Creator": {
                    "type": "rich_text",
                    "rich_text": [{"text": {"content": NAME_CHANGE[name]}}],
                },
            }
        }

        update_response = requests.patch(
            notion_page_url, json=update_payload, headers=headers
        )

        # print(update_response.json())

0 comments on commit df6ce0a

Please sign in to comment.