
Commit

use getTaskInfo with a list of task IDs
obriencj committed Jul 12, 2023
1 parent 3c8f1a3 commit 71a8826
Showing 2 changed files with 42 additions and 9 deletions.
16 changes: 8 additions & 8 deletions kojismokydingo/__init__.py
@@ -571,20 +571,20 @@ def bulk_load_tasks(
     :param results: mapping to store the results in. Default, produce
       a new dict
 
-    :raises NoSuchTask: if err is True and a task couldn'tb e loaded
+    :raises NoSuchTask: if err is True and a task couldn't be loaded
 
     :since: 1.0
     """
 
     results = {} if results is None else results
 
-    fn = partial(session.getTaskInfo, request=request, strict=False)
+    fn = partial(session.getTaskInfo, request=request)
 
-    for key, info in iter_bulk_load(session, fn, task_ids, False, size):
-        if err and not info:
-            raise NoSuchTask(key)
-        else:
-            results[key] = info
+    for key_chunk in chunkseq(task_ids, size):
+        for key, info in zip(key_chunk, fn(key_chunk)):
+            if err and not info:
+                raise NoSuchTask(key)
+            else:
+                results[key] = info
 
     return results

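For reference, the reworked bulk_load_tasks keeps the same mapping-based interface; the change only swaps the per-task loading via iter_bulk_load for one getTaskInfo call per chunk of IDs. A hedged usage sketch follows; the hub URL and task IDs are placeholders, not taken from this repository:

    import koji

    from kojismokydingo import bulk_load_tasks

    # placeholder hub URL; a real session would normally come from a koji profile
    session = koji.ClientSession("https://koji.example.com/kojihub")

    # with err=False a missing task maps to None instead of raising NoSuchTask
    infos = bulk_load_tasks(session, [1001, 1002, 9999999], err=False)

    for tid, info in infos.items():
        print(tid, info["method"] if info else "no such task")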
35 changes: 34 additions & 1 deletion kojismokydingo/common.py
@@ -34,7 +34,7 @@
 from fnmatch import fnmatchcase
 from functools import lru_cache
 from glob import glob
-from itertools import filterfalse
+from itertools import filterfalse, islice
 from operator import itemgetter
 from os.path import expanduser, isdir, join
 from typing import (

@@ -58,6 +58,7 @@
     "find_config_files",
     "get_plugin_config",
     "globfilter",
+    "ichunkseq",
     "load_full_config",
     "load_plugin_config",
     "merge_extend",

@@ -89,6 +90,38 @@ def chunkseq(
             offset in range(0, seqlen, chunksize))
 
 
+def ichunkseq(
+        seq: Iterable,
+        chunksize: int) -> Iterator[Iterable]:
+    """
+    Similar to chunkseq, but lazy. Note that each chunk must be
+    exhausted before beginning a new chunk, as the chunks will be
+    reading from the original sequence only when they themselves are
+    iterated over.
+
+    :param seq: a sequence to chunk up
+
+    :param chunksize: max length for chunks
+
+    :since: 2.1
+    """
+
+    it = iter(seq)
+    cs = chunksize - 1
+
+    def chunk(primer):
+        yield primer
+        yield from islice(it, cs)
+
+    while True:
+        try:
+            primer = next(it)
+        except StopIteration:
+            break
+        else:
+            yield chunk(primer)
+
+
 def escapable_replace(
         orig: str,
         character: str,
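A quick sketch of how the new ichunkseq helper behaves, including the caveat from its docstring about exhausting each chunk before moving on; the values are purely illustrative:

    from kojismokydingo.common import ichunkseq

    # each chunk is a generator reading lazily from the original iterable
    for chunk in ichunkseq(range(10), 4):
        print(list(chunk))
    # -> [0, 1, 2, 3]
    # -> [4, 5, 6, 7]
    # -> [8, 9]

    # leaving a chunk partially consumed shifts where the next chunk starts
    chunks = ichunkseq(range(10), 4)
    first = next(chunks)
    next(first)            # 0, and `first` is not exhausted
    second = next(chunks)
    print(list(second))    # [1, 2, 3, 4]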
