Skip to content

Commit

Permalink
Changed: Page size to 100 to hopefully get fewer PERSISTED_QUERY er…
Browse files Browse the repository at this point in the history
…rors.
  • Loading branch information
basrieter committed Oct 31, 2023
1 parent 5c617f4 commit 3bb02fd
Showing 1 changed file with 11 additions and 4 deletions.
15 changes: 11 additions & 4 deletions channels/channel.se/tv4se/chn_tv4se.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@ def __init__(self, channel_info):
chn_class.Channel.__init__(self, channel_info)

# ============== Actual channel setup STARTS here and should be overwritten from derived classes ===============
self.__max_page_size = 500
self.__max_page_size = 100
self.__access_token = None

if self.channelCode == "tv4segroup":
Expand Down Expand Up @@ -245,8 +245,10 @@ def fetch_mainlist_pages(self, data: str) -> Tuple[str, List[MediaItem]]:
items = []
data = JsonHelper(data)
page_data = data
count = 0

while True:
while count < 25:
count += 1
next_offset = page_data.get_value("data", "mediaIndex", "contentList", "pageInfo",
"nextPageOffset")
if not next_offset or next_offset <= 0:
Expand All @@ -260,8 +262,13 @@ def fetch_mainlist_pages(self, data: str) -> Tuple[str, List[MediaItem]]:
"offset": next_offset}
}
)
page_data = UriHandler.open(url, additional_headers=self.httpHeaders)
page_data = JsonHelper(page_data)
new_data = UriHandler.open(url, additional_headers=self.httpHeaders, force_cache_duration=60*60)
if "PERSISTED_QUERY_NOT_FOUND" in new_data:
Logger.warning("PERSISTED_QUERY_NOT_FOUND found")
time.sleep(2)
continue

page_data = JsonHelper(new_data)
data_items = page_data.get_value(*self.currentParser.Parser)
list_items = data.get_value(*self.currentParser.Parser)
list_items += data_items
Expand Down

0 comments on commit 3bb02fd

Please sign in to comment.