Skip to content

Commit

Permalink
Add pyclass intersphinx references
Browse files Browse the repository at this point in the history
  • Loading branch information
ghiggi committed Jun 26, 2024
1 parent 299b4d8 commit 03ed769
Show file tree
Hide file tree
Showing 45 changed files with 324 additions and 323 deletions.
1 change: 1 addition & 0 deletions .coveragerc
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@ omit =
gpm/retrievals/*
gpm/visualization/animation.py
gpm/utils/pyresample.py
gpm/utils/collocation.py
gpm/_version.py

[report]
Expand Down
4 changes: 2 additions & 2 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -36,10 +36,10 @@ share/python-wheels/
.installed.cfg
*.egg
MANIFEST
_version.py
gpm/_version.py
docs/source/tutorials/
docs/build/

lcov.info

# PyInstaller
# Usually these files are written by a python script from a template
Expand Down
10 changes: 0 additions & 10 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -69,13 +69,3 @@ repos:
- id: rst-backticks
- id: rst-directive-colons
- id: rst-inline-touching-normal

# - repo: https://github.com/adrienverge/yamllint.git
# rev: v1.35.1
# hooks:
# - id: yamllint
# args: [-d relaxed]
# - repo: https://github.com/lyz-code/yamlfix/
# rev: 1.16.0
# hooks:
# - id: yamlfix
1 change: 0 additions & 1 deletion docs/environment.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -16,4 +16,3 @@ dependencies:
- sphinx==7.2.6
- sphinxcontrib-youtube
- ximage
- pip: [pycolorbar]
24 changes: 15 additions & 9 deletions gpm/bucket/partitioning.py
Original file line number Diff line number Diff line change
Expand Up @@ -441,8 +441,8 @@ def quadmesh(self, origin="bottom"):
A quadrilateral mesh is a grid of M by N adjacent quadrilaterals that are defined via a (M+1, N+1)
grid of vertices.
The quadrilateral mesh is accepted by `matplotlib.pyplot.pcolormesh`, `matplotlib.collections.QuadMesh`
`matplotlib.collections.PolyQuadMesh`.
The quadrilateral mesh is accepted by :py:class:`matplotlib.pyplot.pcolormesh`,
:py:class:`matplotlib.collections.QuadMesh` and :py:class:`matplotlib.collections.PolyQuadMesh`.
Parameters
----------
Expand All @@ -464,7 +464,7 @@ def vertices(self, origin="bottom", ccw=True):
"""Return the partitions vertices in an array of shape (N, M, 4, 2).
The output vertices, once the first 2 dimensions are flattened,
can be passed directly to a `matplotlib.PolyCollection`.
can be passed directly to a :py:class:`matplotlib.PolyCollection`.
For plotting with cartopy, the polygon order must be "counterclockwise".
Parameters
Expand Down Expand Up @@ -587,7 +587,7 @@ def add_labels(self, df, x, y, remove_invalid_rows=True):
Parameters
----------
df : `pandas.DataFrame`, `dask.DataFrame`, `polars.DataFrame`, `pyarrow.Table` or `polars.LazyFrame`
df : pandas.DataFrame, dask.DataFrame, polars.DataFrame, pyarrow.Table or polars.LazyFrame
Dataframe to which to add the partition labels.
x : str
Column name with the x coordinate.
Expand All @@ -599,7 +599,7 @@ def add_labels(self, df, x, y, remove_invalid_rows=True):
Returns
-------
df : `pandas.DataFrame`, `dask.DataFrame`, `polars.DataFrame`, `pyarrow.Table` or `polars.LazyFrame`
df : pandas.DataFrame, dask.DataFrame, polars.DataFrame, pyarrow.Table or polars.LazyFrame
Dataframe with the partitions label(s) column(s).
"""
Expand Down Expand Up @@ -631,7 +631,7 @@ def add_centroids(self, df, x, y, x_coord=None, y_coord=None, remove_invalid_row
Parameters
----------
df : `pandas.DataFrame`, `dask.DataFrame`, `polars.DataFrame`, `pyarrow.Table` or `polars.LazyFrame`
df : pandas.DataFrame, dask.DataFrame, polars.DataFrame, pyarrow.Table or polars.LazyFrame
Dataframe to which to add the partition centroids.
x : str
Column name with the x coordinate.
Expand All @@ -649,7 +649,7 @@ def add_centroids(self, df, x, y, x_coord=None, y_coord=None, remove_invalid_row
Returns
-------
df : `pandas.DataFrame`, `dask.DataFrame`, `polars.DataFrame`, `pyarrow.Table` or `polars.LazyFrame`
df : pandas.DataFrame, dask.DataFrame, polars.DataFrame, pyarrow.Table or polars.LazyFrame
Dataframe with the partitions centroids x and y coordinates columns.
"""
Expand Down Expand Up @@ -832,8 +832,14 @@ def __init__(
# -----------------------------------------------------------------------------------.
def _custom_labels_function(self, x_indices, y_indices):
"""Return the partition labels as function of the specified 2D partitions indices."""
x_labels = self.x_centroids[x_indices].round(self._labels_decimals[0]).astype(str)
y_labels = self.y_centroids[y_indices].round(self._labels_decimals[1]).astype(str)
x_labels_value = self.x_centroids[x_indices].round(self._labels_decimals[0])
y_labels_value = self.y_centroids[y_indices].round(self._labels_decimals[1])
if self._labels_decimals[0] == 0:
x_labels_value = x_labels_value.astype(int)
if self._labels_decimals[1] == 0:
y_labels_value = y_labels_value.astype(int)
x_labels = x_labels_value.astype(str)
y_labels = y_labels_value.astype(str)
return x_labels, y_labels

def to_dict(self):
Expand Down
2 changes: 1 addition & 1 deletion gpm/bucket/readers.py
Original file line number Diff line number Diff line change
Expand Up @@ -204,7 +204,7 @@ def read_bucket(
Returns
-------
df : `pandas.DataFrame`, `polars.DataFrame`, `polars.LazyFrame` or `pyarrow.Table`
df : pandas.DataFrame, polars.DataFrame, polars.LazyFrame or pyarrow.Table
Bucket dataframe.
"""
Expand Down
8 changes: 4 additions & 4 deletions gpm/bucket/routines.py
Original file line number Diff line number Diff line change
Expand Up @@ -67,7 +67,7 @@ def write_granule_bucket(
File path of the granule to store in the bucket archive.
bucket_dir: str
Base directory of the per-granule bucket archive.
partitioning: `gpm.bucket.SpatialPartitioning`
partitioning: gpm.bucket.SpatialPartitioning
A spatial partitioning class.
granule_to_df_func : Callable
Function taking a granule filepath, opening it and returning a pandas or dask dataframe.
Expand Down Expand Up @@ -143,7 +143,7 @@ def write_granules_bucket(
File paths of the GPM granules to store in the bucket archive.
bucket_dir: str
Base directory of the per-granule bucket archive.
partitioning: `gpm.bucket.SpatialPartitioning`
partitioning: gpm.bucket.SpatialPartitioning
A spatial partitioning class.
Carefully consider the size of the partitions.
Earth partitioning by:
Expand Down Expand Up @@ -243,11 +243,11 @@ def write_bucket(
Parameters
----------
ds : `pandas.DataFrame` or `dask.DataFrame`
ds : pandas.DataFrame or dask.DataFrame
Pandas or Dask dataframe to be written into a geographic bucket.
bucket_dir: str
Base directory of the geographic bucket archive.
partitioning: `gpm.bucket.SpatialPartitioning`
partitioning: gpm.bucket.SpatialPartitioning
A spatial partitioning class.
Carefully consider the size of the partitions.
Earth partitioning by:
Expand Down
Loading

0 comments on commit 03ed769

Please sign in to comment.