Skip to content

Commit

Permalink
Handle pagination transparently and refactor code some more (#8)
Browse files Browse the repository at this point in the history
  • Loading branch information
jochenklar committed Dec 11, 2024
1 parent edd675c commit 0cc4cbc
Show file tree
Hide file tree
Showing 3 changed files with 126 additions and 81 deletions.
70 changes: 11 additions & 59 deletions isimip_client/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,31 +2,18 @@

import click
from rich import print_json
from rich.console import Console
from rich.logging import RichHandler
from rich.pretty import pretty_repr
from rich.table import Table
from rich.text import Text

from .client import ISIMIPClient

logging.basicConfig(level='INFO', format='%(message)s', handlers=[RichHandler()])


class SearchArgumentType(click.ParamType):
name = "search"

def convert(self, value, param, ctx):
try:
search_key, search_value = value.split('=')
return (search_key, search_value)
except ValueError:
self.fail(f'{param} needs to be of the form key=value')
from .utils import SearchArgumentType, print_details_table, print_results_table


@click.group()
@click.option('--log-level', default='WARNING')
@click.pass_context
def main(ctx):
def main(ctx, log_level):
logging.basicConfig(level=log_level.upper(), format='%(message)s', handlers=[RichHandler()])

ctx.ensure_object(dict)
ctx.obj['client'] = ISIMIPClient(
data_url='https://data.isimip.org/api/v1',
Expand All @@ -45,54 +32,19 @@ def print_response(ctx, response, **kwargs):
elif ctx.obj.get('json'):
print_json(data=response)
else:
table = Table()

if 'results' in response:
table.add_column('id', style='green')
table.add_column('path', style='cyan')
table.add_column('version')
for result in response['results']:
row = [result[key] for key in ['id', 'path', 'version']]
table.add_row(*row)
if isinstance(response, list):
print_results_table(response)
elif 'results' in response:
print_results_table(response['results'])
else:
table.add_column('key')
table.add_column('value')
for key in [
'id',
'path',
'version',
'size',
'checksum',
'checksum_type',
'specifiers',
'resources',
'caveats',
'metadata_url',
'file_url',
'json_url'
]:
value = response.get(key)
if value is None:
continue
elif isinstance(value, (dict, list)):
table.add_row(key, pretty_repr(value))
else:
text = Text(str(value))
if key == 'id':
text.stylize('green')
elif key == 'path':
text.stylize('cyan')
table.add_row(key, text)

console = Console()
console.print(table)
print_details_table(response)


@main.command()
@click.pass_context
@click.argument('search', nargs=-1, type=SearchArgumentType())
@click.option('--page', default=1)
@click.option('--page-size', default=10)
# @click.option('--page-size', default=10)
@click.option('--json', is_flag=True)
def datasets(ctx, search, json, **kwargs):
ctx.obj['json'] = json
Expand Down
72 changes: 50 additions & 22 deletions isimip_client/client.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,46 +10,61 @@

logger = logging.getLogger(__name__)


class HTTPClient:

def __init__(self, base_url, auth, headers, params):
self.base_url, self.auth, self.headers, self.params = base_url, auth, headers, params
def __init__(self, auth, headers):
self.auth, self.headers = auth, headers

def parse_response(self, response):
try:
response.raise_for_status()
return response.json()
except requests.exceptions.HTTPError as e:
logger.error(f'{e} response={response.json()}')
try:
logger.error(f'{e} response={response.json()}')
except json.decoder.JSONDecodeError as e:
logger.error(f'{e} content={response.content}')
return None

def get(self, url, params={}):
response = requests.get(self.base_url + url, params=dict(self.params, **params),
auth=self.auth, headers=self.headers)
logger.info(f'GET url={url} params={params}')
response = requests.get(url, params=params, auth=self.auth, headers=self.headers)
return self.parse_response(response)

def post(self, url, data):
response = requests.post(self.base_url + url, json=data, auth=self.auth, headers=self.headers)
logger.info(f'POST url={url} data={data}')
response = requests.post(url, json=data, auth=self.auth, headers=self.headers)
return self.parse_response(response)

def put(self, url, data):
response = requests.put(self.base_url + url, data, auth=self.auth, headers=self.headers)
logger.info(f'PUT url={url} data={data}')
response = requests.put(url, data, auth=self.auth, headers=self.headers)
return self.parse_response(response)

def patch(self, url, data):
response = requests.patch(self.base_url + url, json=data, auth=self.auth, headers=self.headers)
logger.info(f'PATCH url={url} data={data}')
response = requests.patch(url, json=data, auth=self.auth, headers=self.headers)
return self.parse_response(response)

def delete(self, url):
response = requests.delete(self.base_url + url, auth=self.auth, headers=self.headers)
logger.info(f'DELETE url={url}')
response = requests.delete(url, auth=self.auth, headers=self.headers)
return self.parse_response(response)


class RESTClient(HTTPClient):

def _build_url(self, resource_url, kwargs, pk=None):
url = resource_url.rstrip('/') + '/'
max_results = 1000
page_size = 100

def __init__(self, base_url, params, *args, **kwargs):
super().__init__(*args, **kwargs)
self.base_url = base_url
self.params = {'page_size': self.page_size}
self.params.update(params)

def build_url(self, resource_url, kwargs, pk=None):
url = self.base_url.rstrip('/') + resource_url.rstrip('/') + '/'

if 'list_route' in kwargs:
url += kwargs.pop('list_route').rstrip('/') + '/'
Expand All @@ -66,23 +81,37 @@ def _build_url(self, resource_url, kwargs, pk=None):
return url

def list(self, resource_url, **kwargs):
url = self._build_url(resource_url, kwargs)
return self.get(url, params=kwargs)
paginate = kwargs.pop('paginate', False)
url = self.build_url(resource_url, kwargs)
response = self.get(url, params=dict(self.params, **kwargs))

if paginate:
return response
else:
results = response['results']
while len(results) < self.max_results:
next_url = response.get('next')
if next_url:
response = self.get(next_url)
results += response['results']
else:
break
return results

def retrieve(self, resource_url, pk, **kwargs):
url = self._build_url(resource_url, kwargs, pk)
url = self.build_url(resource_url, kwargs, pk)
return self.get(url)

def create(self, resource_url, data, **kwargs):
url = self._build_url(resource_url, kwargs)
url = self.build_url(resource_url, kwargs)
return self.post(url, data)

def update(self, resource_url, pk, data, **kwargs):
url = self._build_url(resource_url, kwargs, pk)
url = self.build_url(resource_url, kwargs, pk)
return self.put(url, data)

def destroy(self, resource_url, pk, **kwargs):
url = self._build_url(resource_url, kwargs, pk)
url = self.build_url(resource_url, kwargs, pk)
return self.delete(url, pk)


Expand Down Expand Up @@ -154,6 +183,7 @@ def log_job(self, job):
else:
logger.info('job {id} {status} meta={meta}'.format(**job))


class FilesApiV1Mixin:

def mask(self, paths, country=None, bbox=None, landonly=None, poll=None):
Expand Down Expand Up @@ -412,12 +442,10 @@ def __init__(
data_url='https://data.isimip.org/api/v1',
files_api_url='https://files.isimip.org/api/v2',
files_api_version='v2',
page_size=100,
params={},
auth=None,
headers={}
):
super().__init__(data_url, auth, headers, {
'page_size': page_size
})
super().__init__(data_url, params, auth, headers)
self.files_api_url = files_api_url
self.files_api_version = files_api_version
65 changes: 65 additions & 0 deletions isimip_client/utils.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,65 @@
import click
from rich.console import Console
from rich.pretty import pretty_repr
from rich.table import Table
from rich.text import Text


class SearchArgumentType(click.ParamType):
    """Click parameter type for ``key=value`` search terms.

    Converts a command line argument of the form ``key=value`` into a
    ``(key, value)`` tuple, failing with a usage error otherwise.
    """

    name = "search"

    def convert(self, value, param, ctx):
        """Return ``(key, value)`` parsed from *value*, or raise a usage error."""
        try:
            # maxsplit=1 so values that themselves contain '=' (e.g. key=a=b)
            # are kept intact instead of being rejected
            search_key, search_value = value.split('=', 1)
            return (search_key, search_value)
        except ValueError:
            # no '=' at all in the argument
            self.fail(f'{param} needs to be of the form key=value')


def print_results_table(results):
    """Render a list of result dicts as a table with id, path and version columns."""
    columns = (('id', 'green'), ('path', 'cyan'), ('version', None))

    table = Table()
    for heading, colour in columns:
        if colour is None:
            table.add_column(heading)
        else:
            table.add_column(heading, style=colour)

    for entry in results:
        table.add_row(entry['id'], entry['path'], entry['version'])

    Console().print(table)


def print_details_table(details):
    """Render a single response dict as a two-column key/value table.

    Only a fixed set of known keys is shown; keys missing from *details*
    are skipped. Dict/list values are pretty-printed, the 'id' and 'path'
    values are colour-highlighted like in the results table.
    """
    keys = (
        'id', 'path', 'version', 'size', 'checksum', 'checksum_type',
        'specifiers', 'resources', 'caveats', 'metadata_url', 'file_url',
        'json_url',
    )

    table = Table()
    table.add_column('key')
    table.add_column('value')

    for key in keys:
        value = details.get(key)
        if value is None:
            # key absent from the response -> no row
            continue

        if isinstance(value, (dict, list)):
            rendered = pretty_repr(value)
        else:
            rendered = Text(str(value))
            if key == 'id':
                rendered.stylize('green')
            elif key == 'path':
                rendered.stylize('cyan')
        table.add_row(key, rendered)

    Console().print(table)

0 comments on commit 0cc4cbc

Please sign in to comment.