diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml new file mode 100644 index 0000000..6c6102c --- /dev/null +++ b/.github/workflows/build.yml @@ -0,0 +1,89 @@ +name: build + +on: + pull_request: + push: + branches: + - master + workflow_dispatch: + +jobs: + test: + runs-on: ubuntu-latest + strategy: + # By default, GitHub will maximize the number of jobs run in parallel + # depending on the available runners on GitHub-hosted virtual machines. + # max-parallel: 8 + fail-fast: false + matrix: + python-version: + - "3.8" + - "3.9" + - "3.10" + env: + TOXENV: ${{ matrix.tox-env }} + TOX_SKIP_MISSING_INTERPRETERS: False + steps: + - uses: actions/checkout@v2 + + - name: Run pre-commit hook + uses: pre-commit/action@v2.0.3 + with: + token: ${{ secrets.GITHUB_TOKEN }} + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + + - name: Upgrade pip version + run: pip install -U pip + + - name: Install test dependencies + run: pip install tox tox-gh-actions poetry + + - name: Run tox + run: tox + + deploy: + runs-on: ubuntu-latest + needs: test + if: endsWith(github.ref, '/master') + steps: + - uses: actions/checkout@v2 + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2 + with: + python-version: 3.8 + + - name: Install build dependencies + run: | + pip install poetry + + - name: Fetch version + id: fetch_version + run: echo "::set-output name=version_nr::$(poetry version -s)" + + - name: Build a binary wheel and a source tarball + # Note: poetry build required to support CLI script entrypoint in pyproject.toml?! 
+ run: | + poetry build + + - name: Create GitHub Release + id: create_gh_release + uses: actions/create-release@v1 + env: + # use token provided by Actions + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + VERSION: ${{ steps.fetch_version.outputs.version_nr }} + with: + tag_name: ${{env.VERSION}} + release_name: Release ${{env.VERSION}} + draft: false + prerelease: false + + - name: PyPI Publishing + uses: pypa/gh-action-pypi-publish@release/v1 + with: + password: ${{ secrets.PYPI_DEPLOYMENT_API_KEY }} diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index b103024..65565ed 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,71 +1,69 @@ repos: - - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v3.4.0 - hooks: - - id: check-ast # Is it valid Python? - - id: debug-statements # no debbuging statements used - - id: trailing-whitespace - - id: end-of-file-fixer - - id: check-yaml - - id: check-added-large-files - - id: check-case-conflict -# - id: check-executables-have-shebangs - - id: check-json - - id: pretty-format-json - args: [ "--autofix" ] - - id: check-merge-conflict - - id: name-tests-test - - id: check-docstring-first - - id: requirements-txt-fixer - # - id: detect-aws-credentials - - id: detect-private-key + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.3.0 + hooks: + - id: check-ast # Is it valid Python? 
+ - id: debug-statements # no debbuging statements used + - id: trailing-whitespace + - id: end-of-file-fixer + - id: check-yaml + - id: check-added-large-files + - id: check-case-conflict + # - id: check-executables-have-shebangs + - id: check-json + - id: pretty-format-json + args: [ "--autofix" ] + - id: check-merge-conflict + - id: check-docstring-first + - id: requirements-txt-fixer + # - id: detect-aws-credentials + - id: detect-private-key - - repo: https://github.com/pycqa/isort - rev: 5.7.0 - hooks: - - id: isort - args: [ "--profile", "black", "--filter-files" ] + - repo: https://github.com/pycqa/isort + rev: 5.10.1 + hooks: + - id: isort + args: [ "--profile", "black", "--filter-files" ] - - repo: https://github.com/psf/black - rev: 20.8b1 - hooks: - - id: black - language_version: python3 - - repo: https://github.com/asottile/blacken-docs - rev: v1.9.2 - hooks: - - id: blacken-docs - additional_dependencies: [ black==20.8b1 ] + - repo: https://github.com/psf/black + rev: 22.6.0 + hooks: + - id: black + language_version: python3 + args: + - --line-length=120 - - repo: https://gitlab.com/pycqa/flake8 - rev: 3.8.4 - hooks: - - id: flake8 - exclude: "docs/" - args: ['--ignore','W503'] # line break before binary operator - additional_dependencies: - - flake8-bugbear - - flake8-comprehensions - - flake8-tidy-imports + - repo: https://github.com/asottile/blacken-docs + rev: v1.12.1 + hooks: + - id: blacken-docs + additional_dependencies: [ black ] - - repo: https://github.com/mgedmin/check-manifest - rev: "0.46" - hooks: - - id: check-manifest - args: [ "--no-build-isolation", "--ignore", "*.png,.travis/*,docs/*,build_n_install.py,publish.py,readthedocs.yml" ] + - repo: https://gitlab.com/pycqa/flake8 + rev: 3.9.2 + hooks: + - id: flake8 + exclude: ^(docs|scripts|tests)/ + args: + - --max-line-length=120 + additional_dependencies: + - flake8-bugbear + - flake8-comprehensions + - flake8-tidy-imports - # TODO sphinx check - # - repo: 
https://github.com/myint/rstcheck - # rev: 'v3.3.1' - # hooks: - # - id: rstcheck + - repo: https://github.com/mgedmin/check-manifest + rev: "0.48" + hooks: + - id: check-manifest + args: [ "--no-build-isolation", "--ignore", "*.png,docs/*,publish.py,readthedocs.yml,poetry.lock,setup.py" ] + additional_dependencies: [ numpy, poetry==1.1.11 ] - - repo: https://github.com/asottile/pyupgrade - rev: v2.9.0 - hooks: - - id: pyupgrade + - repo: https://github.com/asottile/pyupgrade + rev: v2.36.0 + hooks: + - id: pyupgrade -# # very detailed linting: +# TODO enable for very detailed linting: # - repo: https://github.com/pycqa/pylint # rev: pylint-2.6.0 # hooks: diff --git a/.travis/install.sh b/.travis/install.sh deleted file mode 100755 index 39628cd..0000000 --- a/.travis/install.sh +++ /dev/null @@ -1,6 +0,0 @@ -#!/bin/sh - -set -e -set -u - -pip install tox; diff --git a/.travis/run.sh b/.travis/run.sh deleted file mode 100755 index 11b3214..0000000 --- a/.travis/run.sh +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/sh - -set -e -set -u - -export TOX_SKIP_MISSING_INTERPRETERS="False"; - -exec "$@"; diff --git a/CHANGELOG.rst b/CHANGELOG.rst index b9cd66c..ce75665 100755 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -2,7 +2,11 @@ Changelog ========= -TODO python 3.6 support. 
tests not passing so far only for this python version +2.2.1 (2022-07-10) +------------------- + +* packaging completely based on pyproject.toml (poetry) +* CI/CD: automatic publishing based on GitHub Actions 2.2.0 (2021-01-25) ------------------- diff --git a/MANIFEST.in b/MANIFEST.in deleted file mode 100755 index fa12197..0000000 --- a/MANIFEST.in +++ /dev/null @@ -1,23 +0,0 @@ -global-exclude *.py[cod] -global-exclude *.coverage - -include CHANGELOG.rst -include CONTRIBUTING.rst -include LICENSE -include README.rst -include VERSION -include example.json -include tox.ini -include runtests.py -include .editorconfig - - -include *.in -include *.txt -include *.yaml -include *.yml - -recursive-include tests *.py - -recursive-exclude * __pycache__ -recursive-exclude * *.DS_Store diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..ad35000 --- /dev/null +++ b/Makefile @@ -0,0 +1,38 @@ +pin: + @echo "pinning the dependencies specified in 'pyproject.toml':" + @poetry update -vv + #poetry export -f requirements.txt --output docs/requirements.txt --without-hashes + +req: + @echo "installing the development dependencies..." + @poetry install + @#poetry install --no-dev + + +update: pin req + +test: + @tox + #pytest + +hook: + @pre-commit install + @pre-commit run --all-files + +hook2: + @pre-commit autoupdate + +clean: + rm -rf .pytest_cache .coverage coverage.xml tests/__pycache__ .mypyp_cache/ .tox + + +build: + poetry build + +# documentation generation: +# https://docs.readthedocs.io/en/stable/intro/getting-started-with-sphinx.html +docs: + (cd docs && make html) + + +.PHONY: clean test build docs diff --git a/README.rst b/README.rst index d7cf082..de73402 100644 --- a/README.rst +++ b/README.rst @@ -36,8 +36,7 @@ python package for fast geometric shortest path computation in 2D multi-polygon Quick Guide: -:: - +.. 
code-block:: console pip install extremitypathfinder diff --git a/TODOs.txt b/TODOs.txt deleted file mode 100644 index a0d034e..0000000 --- a/TODOs.txt +++ /dev/null @@ -1,4 +0,0 @@ -TODOs - -s. Issues -s. TODOs in code diff --git a/VERSION b/VERSION deleted file mode 100644 index ccbccc3..0000000 --- a/VERSION +++ /dev/null @@ -1 +0,0 @@ -2.2.0 diff --git a/build_n_install.py b/build_n_install.py deleted file mode 100755 index 3d33b8c..0000000 --- a/build_n_install.py +++ /dev/null @@ -1,37 +0,0 @@ -import os - -PACKAGE = "extremitypathfinder" -VERSION_FILE = "VERSION" -VIRT_ENVS = ["pathEnv"] -VIRT_ENV_COMMAND = ". ~/miniconda3/etc/profile.d/conda.sh; conda activate {virt_env}; " -PY_VERSION_IDS = [ - "36", - "37", - "38", -] # the supported python versions to create wheels for -PYTHON_TAG = ".".join([f"py{v}" for v in PY_VERSION_IDS]) - -if __name__ == "__main__": - - print("building now:") - # routine("python3 setup.py sdist bdist_wheel upload", 'Uploading the package now.') # deprecated - # new twine publishing routine: - # https://packaging.python.org/tutorials/packaging-projects/ - # delete the build folder before to get a fresh build - # TODO do not remove dist in the future - os.system("rm -r -f build") - os.system("rm -r -f dist") - - build_cmd = f"python setup.py sdist bdist_wheel --python-tag {PYTHON_TAG}" - os.system(build_cmd) - - # in all specified virtual environments - for virt_env in VIRT_ENVS: - virt_env_cmd = VIRT_ENV_COMMAND.format(virt_env=virt_env) - install_cmd = f"{virt_env_cmd} python setup.py install" - os.system(install_cmd) - - # routine(build_cmd, 'building the package now.', - # 'build done. check the included files! 
installing package in virtual environment next.') - # routine(install_cmd) - os.system("rm -r -f build") diff --git a/docs/0_getting_started.rst b/docs/0_getting_started.rst index 293d53f..2a8a682 100644 --- a/docs/0_getting_started.rst +++ b/docs/0_getting_started.rst @@ -10,7 +10,8 @@ Installation Installation with pip: -:: + +.. code-block:: console pip install extremitypathfinder diff --git a/docs/conf.py b/docs/conf.py index c7592df..50a65ad 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -12,6 +12,7 @@ # documentation root, use os.path.abspath to make it absolute, like shown here. import os +import subprocess import sys # Get the project root dir, which is the parent dir of this @@ -25,19 +26,17 @@ import extremitypathfinder # needed for auto document, ATTENTION: must then be installed during online build! +print(extremitypathfinder) + # -- Project information ----------------------------------------------------- project = "extremitypathfinder" copyright = "2018, Jannik Michelfeit" author = "Jannik Michelfeit" - -def get_version(): - return open(os.path.join(project_root, "VERSION")).read() - - # The full version, including alpha/beta/rc tags. 
-release = get_version() +release = subprocess.getoutput("poetry version -s") +print("release version:", release) # -- General configuration --------------------------------------------------- diff --git a/docs/requirements.txt b/docs/requirements.txt new file mode 100644 index 0000000..df255bc --- /dev/null +++ b/docs/requirements.txt @@ -0,0 +1,3 @@ +numpy +Sphinx +sphinx-rtd-theme diff --git a/docs/requirements_docs.txt b/docs/requirements_docs.txt deleted file mode 100644 index aa19de0..0000000 --- a/docs/requirements_docs.txt +++ /dev/null @@ -1,5 +0,0 @@ -# basic dependencies: -# python3.6+ -numpy>=1.16 -sphinx_rtd_theme -matplotlib diff --git a/extremitypathfinder/extremitypathfinder.py b/extremitypathfinder/extremitypathfinder.py index 83d0c9d..52b5226 100644 --- a/extremitypathfinder/extremitypathfinder.py +++ b/extremitypathfinder/extremitypathfinder.py @@ -13,13 +13,7 @@ OBSTACLE_ITER_TYPE, PATH_TYPE, ) -from extremitypathfinder.helper_classes import ( - DirectedHeuristicGraph, - Edge, - Polygon, - PolygonVertex, - Vertex, -) +from extremitypathfinder.helper_classes import DirectedHeuristicGraph, Edge, Polygon, PolygonVertex, Vertex from extremitypathfinder.helper_fcts import ( check_data_requirements, convert_gridworld, @@ -52,9 +46,7 @@ class PolygonEnvironment: holes: List[Polygon] = None prepared: bool = False graph: DirectedHeuristicGraph = None - temp_graph: DirectedHeuristicGraph = ( - None # for storing and plotting the graph during a query - ) + temp_graph: DirectedHeuristicGraph = None # for storing and plotting the graph during a query _all_extremities: Optional[Set[PolygonVertex]] = None @property @@ -73,9 +65,7 @@ def all_extremities(self) -> Set[PolygonVertex]: self._all_extremities = set() for p in self.polygons: # only extremities that are actually within the map should be considered - self._all_extremities |= set( - filter(lambda e: self.within_map(e.coordinates), p.extremities) - ) + self._all_extremities |= set(filter(lambda e: 
self.within_map(e.coordinates), p.extremities)) return self._all_extremities @property @@ -109,18 +99,13 @@ def store( self.prepared = False # loading the map boundary_coordinates = np.array(boundary_coordinates) - list_of_hole_coordinates = [ - np.array(hole_coords) for hole_coords in list_of_hole_coordinates - ] + list_of_hole_coordinates = [np.array(hole_coords) for hole_coords in list_of_hole_coordinates] if validate: check_data_requirements(boundary_coordinates, list_of_hole_coordinates) self.boundary_polygon = Polygon(boundary_coordinates, is_hole=False) # IMPORTANT: make a copy of the list instead of linking to the same list (python!) - self.holes = [ - Polygon(coordinates, is_hole=True) - for coordinates in list_of_hole_coordinates - ] + self.holes = [Polygon(coordinates, is_hole=True) for coordinates in list_of_hole_coordinates] def store_grid_world( self, @@ -141,9 +126,7 @@ def store_grid_world( :param validate: whether the input should be validated :param simplify: whether the polygons should be simplified or not. reduces edge amount, allow diagonal edges """ - boundary_coordinates, list_of_hole_coordinates = convert_gridworld( - size_x, size_y, obstacle_iter, simplify - ) + boundary_coordinates, list_of_hole_coordinates = convert_gridworld(size_x, size_y, obstacle_iter, simplify) self.store(boundary_coordinates, list_of_hole_coordinates, validate) def export_pickle(self, path: str = DEFAULT_PICKLE_NAME): @@ -180,9 +163,7 @@ def prepare(self): # TODO include in storing functions? """ if self.prepared: - raise ValueError( - "this environment is already prepared. load new polygons first." - ) + raise ValueError("this environment is already prepared. load new polygons first.") # preprocessing the map # construct graph of visible (=directly reachable) extremities @@ -207,11 +188,7 @@ def prepare(self): # TODO include in storing functions? 
candidate_extremities = extremities_to_check.copy() # remove the extremities with the same coordinates as the query extremity candidate_extremities.difference_update( - { - c - for c in candidate_extremities - if c.get_angle_representation() is None - } + {c for c in candidate_extremities if c.get_angle_representation() is None} ) # these vertices all belong to a polygon @@ -296,9 +273,7 @@ def within_map(self, coords: INPUT_COORD_TYPE): """ # x, y = coords - if not inside_polygon( - x, y, self.boundary_polygon.coordinates, border_value=True - ): + if not inside_polygon(x, y, self.boundary_polygon.coordinates, border_value=True): return False for hole in self.holes: if inside_polygon(x, y, hole.coordinates, border_value=False): @@ -347,9 +322,7 @@ def find_shortest_path( goal_vertex = Vertex(goal_coordinates) # check the goal node first (earlier termination possible) - self.translate( - new_origin=goal_vertex - ) # do before checking angle representations! + self.translate(new_origin=goal_vertex) # do before checking angle representations! # IMPORTANT: manually translate the start vertex, because it is not part of any polygon # and hence does not get translated automatically start_vertex.mark_outdated() @@ -367,9 +340,7 @@ def find_shortest_path( # NOTE: all edges are being checked, it is computationally faster to compute all visibilities in one go candidates.add(start_vertex) - visibles_n_distances_goal = find_visible( - candidates, edges_to_check=set(self.all_edges) - ) + visibles_n_distances_goal = find_visible(candidates, edges_to_check=set(self.all_edges)) if len(visibles_n_distances_goal) == 0: # The goal node does not have any neighbours. Hence there is not possible path to the goal. return [], None @@ -391,9 +362,7 @@ def find_shortest_path( # add edges in the direction: extremity (v) -> goal self.temp_graph.add_directed_edge(v, goal_vertex, d) - self.translate( - new_origin=start_vertex - ) # do before checking angle representations! 
+ self.translate(new_origin=start_vertex) # do before checking angle representations! # the visibility of only the graphs nodes have to be checked # the goal node does not have to be considered, because of the earlier check candidates = set( @@ -402,17 +371,13 @@ def find_shortest_path( self.graph.get_all_nodes(), ) ) - visibles_n_distances_start = find_visible( - candidates, edges_to_check=set(self.all_edges) - ) + visibles_n_distances_start = find_visible(candidates, edges_to_check=set(self.all_edges)) if len(visibles_n_distances_start) == 0: # The start node does not have any neighbours. Hence there is not possible path to the goal. return [], None # add edges in the direction: start -> extremity - self.temp_graph.add_multiple_directed_edges( - start_vertex, visibles_n_distances_start - ) + self.temp_graph.add_multiple_directed_edges(start_vertex, visibles_n_distances_start) # also here unnecessary edges in the graph can be deleted when start or goal lie in front of visible extremities # IMPORTANT: when a query point happens to coincide with an extremity, edges to the (visible) extremities @@ -460,9 +425,7 @@ def find_shortest_path( self.temp_graph.remove_multiple_undirected_edges(vertex, lie_in_front) # NOTE: exploiting property 2 from [1] here would be more expensive than beneficial - vertex_path, distance = self.temp_graph.modified_a_star( - start_vertex, goal_vertex - ) + vertex_path, distance = self.temp_graph.modified_a_star(start_vertex, goal_vertex) if free_space_after: del self.temp_graph # free the memory diff --git a/extremitypathfinder/helper_classes.py b/extremitypathfinder/helper_classes.py index 35ab8aa..a25fdc0 100644 --- a/extremitypathfinder/helper_classes.py +++ b/extremitypathfinder/helper_classes.py @@ -198,9 +198,7 @@ def __init__(self, coordinate_list, is_hole): len(coordinate_list), ) - self.vertices: List[PolygonVertex] = [ - PolygonVertex(coordinate) for coordinate in coordinate_list - ] + self.vertices: List[PolygonVertex] = 
[PolygonVertex(coordinate) for coordinate in coordinate_list] self.edges: List[Edge] = [] vertex1 = self.vertices[-1] @@ -235,9 +233,7 @@ def _find_extremities(self): p3 = v3.coordinates # since consequent vertices are not permitted to be equal, # the angle representation of the difference is well defined - if ( - AngleRepresentation(p3 - p2).value - AngleRepresentation(p1 - p2).value - ) % 4 < 2.0: + if (AngleRepresentation(p3 - p2).value - AngleRepresentation(p1 - p2).value) % 4 < 2.0: # basic idea: # - translate the coordinate system to have p2 as origin # - compute the angle representations of both vectors representing the edges @@ -272,9 +268,7 @@ def translate(self, new_origin: Vertex): class SearchState(object): __slots__ = ["node", "distance", "neighbours", "path", "cost_so_far", "priority"] - def __init__( - self, node, distance, neighbour_generator, path, cost_so_far, cost_estim - ): + def __init__(self, node, distance, neighbour_generator, path, cost_so_far, cost_estim): self.node = node self.distance = distance # TODO @@ -285,9 +279,7 @@ def __init__( # = cost_so_far + cost_estim (= start-current + estimate(current-goal)) self.priority: float = cost_so_far + cost_estim - def __lt__( - self, other - ): # defines an ordering -> items can be stored in a sorted heap + def __lt__(self, other): # defines an ordering -> items can be stored in a sorted heap return self.priority < other.priority @@ -433,9 +425,7 @@ def join_identical(self): while len(nodes_to_check) > 1: n1 = nodes_to_check.pop() coordinates1 = n1.coordinates - same_nodes = { - n for n in nodes_to_check if np.allclose(coordinates1, n.coordinates) - } + same_nodes = {n for n in nodes_to_check if np.allclose(coordinates1, n.coordinates)} nodes_to_check.difference_update(same_nodes) for n2 in same_nodes: # print('removing duplicate node', n2) @@ -492,9 +482,7 @@ def enqueue_neighbours(): except StopIteration: # there is no neighbour left return - state = SearchState( - next_node, distance, 
neighbours, path, cost_so_far, cost_estim - ) + state = SearchState(next_node, distance, neighbours, path, cost_so_far, cost_estim) search_state_queue.put(state) self.set_goal_node(goal) # lazy update of the heuristic diff --git a/extremitypathfinder/helper_fcts.py b/extremitypathfinder/helper_fcts.py index 8ee545a..f3211bd 100644 --- a/extremitypathfinder/helper_fcts.py +++ b/extremitypathfinder/helper_fcts.py @@ -20,10 +20,7 @@ def inside_polygon(x, y, coords, border_value): p = np.array([x, y]) p1 = coords[-1, :] for p2 in coords[:]: - if ( - abs(AngleRepresentation(p1 - p).value - AngleRepresentation(p2 - p).value) - == 2.0 - ): + if abs(AngleRepresentation(p1 - p).value - AngleRepresentation(p2 - p).value) == 2.0: return border_value p1 = p2 @@ -45,9 +42,7 @@ def inside_polygon(x, y, coords, border_value): # depending on the position of p2 this determines whether the polygon edge is right or left of the point # to avoid expensive division the divisors (of the slope dy/dx) are brought to the other side # ( dy/dx > a == dy > a * dx ) - if (x1GEx and x2GEx) or ( - (x1GEx or x2GEx) and (y2 - y) * (x2 - x1) <= (y2 - y1) * (x2 - x) - ): + if (x1GEx and x2GEx) or ((x1GEx or x2GEx) and (y2 - y) * (x2 - x1) <= (y2 - y1) * (x2 - x)): contained = not contained else: @@ -57,9 +52,7 @@ def inside_polygon(x, y, coords, border_value): # only crossings "right" of the point should be counted x1GEx = x <= x1 x2GEx = x <= x2 - if (x1GEx and x2GEx) or ( - (x1GEx or x2GEx) and (y2 - y) * (x2 - x1) >= (y2 - y1) * (x2 - x) - ): + if (x1GEx and x2GEx) or ((x1GEx or x2GEx) and (y2 - y) * (x2 - x1) >= (y2 - y1) * (x2 - x)): contained = not contained y1 = y2 @@ -195,9 +188,7 @@ def check_polygon(polygon): # TODO test # todo - polygons must not intersect each other -def check_data_requirements( - boundary_coords: np.ndarray, list_hole_coords: List[np.ndarray] -): +def check_data_requirements(boundary_coords: np.ndarray, list_hole_coords: List[np.ndarray]): """ensures that all the 
following conditions on the polygons are fulfilled: - basic polygon requirements (s. above) - edge numbering has to follow this convention (for easier computations): @@ -209,9 +200,7 @@ def check_data_requirements( """ check_polygon(boundary_coords) if has_clockwise_numbering(boundary_coords): - raise ValueError( - "Vertex numbering of the boundary polygon must be counter clockwise." - ) + raise ValueError("Vertex numbering of the boundary polygon must be counter clockwise.") for hole_coords in list_hole_coords: check_polygon(hole_coords) if not has_clockwise_numbering(hole_coords): @@ -220,9 +209,7 @@ def check_data_requirements( # TODO data rectification -def find_within_range( - repr1, repr2, repr_diff, vertex_set, angle_range_less_180, equal_repr_allowed -): +def find_within_range(repr1, repr2, repr_diff, vertex_set, angle_range_less_180, equal_repr_allowed): """ filters out all vertices whose representation lies within the range between the two given angle representations @@ -301,9 +288,7 @@ def not_within_eq(vertex): return set(filter(filter_fct, vertex_set)) -def convert_gridworld( - size_x: int, size_y: int, obstacle_iter: iter, simplify: bool = True -) -> (list, list): +def convert_gridworld(size_x: int, size_y: int, obstacle_iter: iter, simplify: bool = True) -> (list, list): """ prerequisites: grid world must not have non-obstacle cells which are surrounded by obstacles ("single white cell in black surrounding" = useless for path planning) @@ -321,10 +306,7 @@ def convert_gridworld( if len(obstacle_iter) == 0: # there are no obstacles. 
return just the simple boundary rectangle - return [ - np.array(x, y) - for x, y in [(0, 0), (size_x, 0), (size_x, size_y), (0, size_y)] - ], [] + return [np.array(x, y) for x, y in [(0, 0), (size_x, 0), (size_x, size_y), (0, size_y)]], [] # convert (x,y) into np.arrays # obstacle_iter = [np.array(o) for o in obstacle_iter] @@ -383,9 +365,7 @@ def construct_polygon(start_pos, boundary_detect_fct, cntr_clockwise_wanted: boo # left has to be checked first # do not check if just turned left or right (-> the left is blocked for sure) # left_pos = current_pos + left_vect - if not ( - just_turned or boundary_detect_fct(current_pos + directions[left_index]) - ): + if not (just_turned or boundary_detect_fct(current_pos + directions[left_index])): # print('< turn left') forward_index = left_index left_index = (forward_index - 1) % 4 @@ -430,9 +410,7 @@ def construct_polygon(start_pos, boundary_detect_fct, cntr_clockwise_wanted: boo start_pos = find_start(start_pos=(0, 0), boundary_detect_fct=is_unblocked) # print(start_pos+directions[3]) # raise ValueError - boundary_edges = construct_polygon( - start_pos, boundary_detect_fct=is_blocked, cntr_clockwise_wanted=True - ) + boundary_edges = construct_polygon(start_pos, boundary_detect_fct=is_blocked, cntr_clockwise_wanted=True) if simplify: # TODO @@ -443,26 +421,18 @@ def construct_polygon(start_pos, boundary_detect_fct, cntr_clockwise_wanted: boo # shift coordinates by +(0.5,0.5) for correct detection # the border value does not matter here unchecked_obstacles = [ - o - for o in obstacle_iter - if inside_polygon(o[0] + 0.5, o[1] + 0.5, boundary_edges, border_value=True) + o for o in obstacle_iter if inside_polygon(o[0] + 0.5, o[1] + 0.5, boundary_edges, border_value=True) ] hole_list = [] while len(unchecked_obstacles) > 0: - start_pos = find_start( - start_pos=(0, 0), boundary_detect_fct=pos_in_iter, iter=unchecked_obstacles - ) - hole = construct_polygon( - start_pos, boundary_detect_fct=is_unblocked, 
cntr_clockwise_wanted=False - ) + start_pos = find_start(start_pos=(0, 0), boundary_detect_fct=pos_in_iter, iter=unchecked_obstacles) + hole = construct_polygon(start_pos, boundary_detect_fct=is_unblocked, cntr_clockwise_wanted=False) # detect which of the obstacles still do not belong to any hole: # delete the obstacles which are included in the just constructed hole unchecked_obstacles = [ - o - for o in unchecked_obstacles - if not inside_polygon(o[0] + 0.5, o[1] + 0.5, hole, border_value=True) + o for o in unchecked_obstacles if not inside_polygon(o[0] + 0.5, o[1] + 0.5, hole, border_value=True) ] if simplify: @@ -505,6 +475,8 @@ def find_visible(vertex_candidates, edges_to_check): edge = edges_to_check.pop() lies_on_edge = False + range_less_180 = False + v1, v2 = edge.vertex1, edge.vertex2 if v1.get_distance_to_origin() == 0.0: # vertex1 has the same coordinates as the query vertex -> on the edge @@ -537,7 +509,6 @@ def find_visible(vertex_candidates, edges_to_check): if repr_diff == 2.0: # angle == 180deg -> on the edge lies_on_edge = True - range_less_180 = False # does actually not matter here if lies_on_edge: # when the query vertex lies on an edge (or vertex) no behind/in front checks must be performed! 
@@ -601,12 +572,8 @@ def find_visible(vertex_candidates, edges_to_check): # if a candidate is farther away from the query point than both vertices of the edge, # it surely lies behind the edge max_distance = max(v1.get_distance_to_origin(), v2.get_distance_to_origin()) - vertices_behind = set( - filter( - lambda extr: extr.get_distance_to_origin() > max_distance, - vertices_to_check, - ) - ) + vertices_behind = {v for v in vertices_to_check if v.get_distance_to_origin() > max_distance} + # they do not have to be checked, no intersection computation necessary # TODO improvement: increase the neighbouring edges' priorities when there were extremities behind vertices_to_check.difference_update(vertices_behind) diff --git a/extremitypathfinder/plotting.py b/extremitypathfinder/plotting.py index 9cd6c0e..da4caaf 100644 --- a/extremitypathfinder/plotting.py +++ b/extremitypathfinder/plotting.py @@ -198,6 +198,7 @@ class PlottingEnvironment(PolygonEnvironment): """Extends PolygonEnvironment. In addition to the base functionality it plots graphs of the polygons, the visibility graph and the computed path. Stores all graphs in the folder defined by plotting_dir parameter.""" + def __init__(self, plotting_dir=PLOTTING_DIR): super().__init__() global PLOTTING_DIR @@ -218,13 +219,9 @@ def prepare(self): def find_shortest_path(self, *args, **kwargs): """Also draws the computed shortest path.""" # important to not delete the temp graph! for plotting - vertex_path, distance = super().find_shortest_path( - *args, free_space_after=False, **kwargs - ) + vertex_path, distance = super().find_shortest_path(*args, free_space_after=False, **kwargs) - if ( - self.temp_graph - ): # in some cases (e.g. direct path possible) no graph is being created! + if self.temp_graph: # in some cases (e.g. direct path possible) no graph is being created! 
draw_graph(self, self.temp_graph) draw_with_path(self, self.temp_graph, vertex_path) draw_only_path(self, vertex_path) diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 0000000..83247b6 --- /dev/null +++ b/poetry.lock @@ -0,0 +1,884 @@ +[[package]] +name = "alabaster" +version = "0.7.12" +description = "A configurable sidebar-enabled Sphinx theme" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "atomicwrites" +version = "1.4.1" +description = "Atomic file writes." +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] +name = "attrs" +version = "21.4.0" +description = "Classes Without Boilerplate" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[package.extras] +dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"] +docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] +tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "cloudpickle"] +tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "cloudpickle"] + +[[package]] +name = "babel" +version = "2.10.3" +description = "Internationalization utilities" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +pytz = ">=2015.7" + +[[package]] +name = "certifi" +version = "2022.6.15" +description = "Python package for providing Mozilla's CA Bundle." +category = "dev" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "cfgv" +version = "3.3.1" +description = "Validate configuration and produce human readable error messages." 
+category = "dev" +optional = false +python-versions = ">=3.6.1" + +[[package]] +name = "charset-normalizer" +version = "2.1.0" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +category = "dev" +optional = false +python-versions = ">=3.6.0" + +[package.extras] +unicode_backport = ["unicodedata2"] + +[[package]] +name = "colorama" +version = "0.4.5" +description = "Cross-platform colored terminal text." +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "cycler" +version = "0.11.0" +description = "Composable style cycles" +category = "dev" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "distlib" +version = "0.3.4" +description = "Distribution utilities" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "docutils" +version = "0.17.1" +description = "Docutils -- Python Documentation Utilities" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "filelock" +version = "3.7.1" +description = "A platform independent file lock." 
+category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["furo (>=2021.8.17b43)", "sphinx (>=4.1)", "sphinx-autodoc-typehints (>=1.12)"] +testing = ["covdefaults (>=1.2.0)", "coverage (>=4)", "pytest (>=4)", "pytest-cov", "pytest-timeout (>=1.4.2)"] + +[[package]] +name = "fonttools" +version = "4.34.4" +description = "Tools to manipulate font files" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +all = ["fs (>=2.2.0,<3)", "lxml (>=4.0,<5)", "zopfli (>=0.1.4)", "lz4 (>=1.7.4.2)", "matplotlib", "sympy", "skia-pathops (>=0.5.0)", "uharfbuzz (>=0.23.0)", "brotlicffi (>=0.8.0)", "scipy", "brotli (>=1.0.1)", "munkres", "unicodedata2 (>=14.0.0)", "xattr"] +graphite = ["lz4 (>=1.7.4.2)"] +interpolatable = ["scipy", "munkres"] +lxml = ["lxml (>=4.0,<5)"] +pathops = ["skia-pathops (>=0.5.0)"] +plot = ["matplotlib"] +repacker = ["uharfbuzz (>=0.23.0)"] +symfont = ["sympy"] +type1 = ["xattr"] +ufo = ["fs (>=2.2.0,<3)"] +unicode = ["unicodedata2 (>=14.0.0)"] +woff = ["zopfli (>=0.1.4)", "brotlicffi (>=0.8.0)", "brotli (>=1.0.1)"] + +[[package]] +name = "identify" +version = "2.5.1" +description = "File identification library for Python" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +license = ["ukkonen"] + +[[package]] +name = "idna" +version = "3.3" +description = "Internationalized Domain Names in Applications (IDNA)" +category = "dev" +optional = false +python-versions = ">=3.5" + +[[package]] +name = "imagesize" +version = "1.4.1" +description = "Getting image size from png/jpeg/jpeg2000/gif file" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] +name = "importlib-metadata" +version = "4.12.0" +description = "Read metadata from Python packages" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +zipp = ">=0.5" + +[package.extras] +docs = ["sphinx", "jaraco.packaging 
(>=9)", "rst.linker (>=1.9)"] +perf = ["ipython"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.3)", "packaging", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "importlib-resources (>=1.3)"] + +[[package]] +name = "iniconfig" +version = "1.1.1" +description = "iniconfig: brain-dead simple config-ini parsing" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "jinja2" +version = "3.1.2" +description = "A very fast and expressive template engine." +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "kiwisolver" +version = "1.4.3" +description = "A fast implementation of the Cassowary constraint solver" +category = "dev" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "markupsafe" +version = "2.1.1" +description = "Safely add untrusted strings to HTML/XML markup." +category = "dev" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "matplotlib" +version = "3.5.2" +description = "Python plotting package" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +cycler = ">=0.10" +fonttools = ">=4.22.0" +kiwisolver = ">=1.0.1" +numpy = ">=1.17" +packaging = ">=20.0" +pillow = ">=6.2.0" +pyparsing = ">=2.2.1" +python-dateutil = ">=2.7" +setuptools_scm = ">=4" + +[[package]] +name = "nodeenv" +version = "1.7.0" +description = "Node.js virtual environment builder" +category = "dev" +optional = false +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" + +[[package]] +name = "numpy" +version = "1.23.1" +description = "NumPy is the fundamental package for array computing with Python." 
+category = "main" +optional = false +python-versions = ">=3.8" + +[[package]] +name = "packaging" +version = "21.3" +description = "Core utilities for Python packages" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" + +[[package]] +name = "pillow" +version = "9.2.0" +description = "Python Imaging Library (Fork)" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-issues (>=3.0.1)", "sphinx-removed-in", "sphinxext-opengraph"] +tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] + +[[package]] +name = "platformdirs" +version = "2.5.2" +description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)", "sphinx (>=4)"] +test = ["appdirs (==1.4.4)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)", "pytest (>=6)"] + +[[package]] +name = "pluggy" +version = "1.0.0" +description = "plugin and hook calling mechanisms for python" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "pre-commit" +version = "2.19.0" +description = "A framework for managing and maintaining multi-language pre-commit hooks." 
+category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +cfgv = ">=2.0.0" +identify = ">=1.0.0" +nodeenv = ">=0.11.1" +pyyaml = ">=5.1" +toml = "*" +virtualenv = ">=20.0.8" + +[[package]] +name = "py" +version = "1.11.0" +description = "library with cross-python path, ini-parsing, io, code, log facilities" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "pygments" +version = "2.12.0" +description = "Pygments is a syntax highlighting package written in Python." +category = "dev" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "pyparsing" +version = "3.0.9" +description = "pyparsing module - Classes and methods to define and execute parsing grammars" +category = "dev" +optional = false +python-versions = ">=3.6.8" + +[package.extras] +diagrams = ["railroad-diagrams", "jinja2"] + +[[package]] +name = "pytest" +version = "6.2.5" +description = "pytest: simple powerful testing with Python" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} +attrs = ">=19.2.0" +colorama = {version = "*", markers = "sys_platform == \"win32\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +py = ">=1.8.2" +toml = "*" + +[package.extras] +testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] + +[[package]] +name = "python-dateutil" +version = "2.8.2" +description = "Extensions to the standard Python datetime module" +category = "dev" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "pytz" +version = "2022.1" +description = "World timezone definitions, modern and historical" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "pyyaml" +version = "6.0" +description = "YAML parser and 
emitter for Python" +category = "dev" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "requests" +version = "2.28.1" +description = "Python HTTP for Humans." +category = "dev" +optional = false +python-versions = ">=3.7, <4" + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<3" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<1.27" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use_chardet_on_py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "setuptools-scm" +version = "7.0.4" +description = "the blessed package to manage your versions by scm tags" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +packaging = ">=20.0" +tomli = ">=1.0.0" +typing-extensions = "*" + +[package.extras] +test = ["pytest (>=6.2)", "virtualenv (>20)"] +toml = ["setuptools (>=42)"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" + +[[package]] +name = "snowballstemmer" +version = "2.2.0" +description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." 
+category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "sphinx" +version = "4.5.0" +description = "Python documentation generator" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +alabaster = ">=0.7,<0.8" +babel = ">=1.3" +colorama = {version = ">=0.3.5", markers = "sys_platform == \"win32\""} +docutils = ">=0.14,<0.18" +imagesize = "*" +importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} +Jinja2 = ">=2.3" +packaging = "*" +Pygments = ">=2.0" +requests = ">=2.5.0" +snowballstemmer = ">=1.1" +sphinxcontrib-applehelp = "*" +sphinxcontrib-devhelp = "*" +sphinxcontrib-htmlhelp = ">=2.0.0" +sphinxcontrib-jsmath = "*" +sphinxcontrib-qthelp = "*" +sphinxcontrib-serializinghtml = ">=1.1.5" + +[package.extras] +docs = ["sphinxcontrib-websupport"] +lint = ["flake8 (>=3.5.0)", "isort", "mypy (>=0.931)", "docutils-stubs", "types-typed-ast", "types-requests"] +test = ["pytest", "pytest-cov", "html5lib", "cython", "typed-ast"] + +[[package]] +name = "sphinx-rtd-theme" +version = "1.0.0" +description = "Read the Docs theme for Sphinx" +category = "dev" +optional = false +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*" + +[package.dependencies] +docutils = "<0.18" +sphinx = ">=1.6" + +[package.extras] +dev = ["transifex-client", "sphinxcontrib-httpdomain", "bump2version"] + +[[package]] +name = "sphinxcontrib-applehelp" +version = "1.0.2" +description = "sphinxcontrib-applehelp is a sphinx extension which outputs Apple help books" +category = "dev" +optional = false +python-versions = ">=3.5" + +[package.extras] +lint = ["flake8", "mypy", "docutils-stubs"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-devhelp" +version = "1.0.2" +description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document." 
+category = "dev" +optional = false +python-versions = ">=3.5" + +[package.extras] +lint = ["flake8", "mypy", "docutils-stubs"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-htmlhelp" +version = "2.0.0" +description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.extras] +lint = ["flake8", "mypy", "docutils-stubs"] +test = ["pytest", "html5lib"] + +[[package]] +name = "sphinxcontrib-jsmath" +version = "1.0.1" +description = "A sphinx extension which renders display math in HTML via JavaScript" +category = "dev" +optional = false +python-versions = ">=3.5" + +[package.extras] +test = ["pytest", "flake8", "mypy"] + +[[package]] +name = "sphinxcontrib-qthelp" +version = "1.0.3" +description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document." +category = "dev" +optional = false +python-versions = ">=3.5" + +[package.extras] +lint = ["flake8", "mypy", "docutils-stubs"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-serializinghtml" +version = "1.1.5" +description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)." 
+category = "dev" +optional = false +python-versions = ">=3.5" + +[package.extras] +lint = ["flake8", "mypy", "docutils-stubs"] +test = ["pytest"] + +[[package]] +name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +category = "dev" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +category = "dev" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "tox" +version = "3.25.1" +description = "tox is a generic virtualenv management and test command line tool" +category = "dev" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" + +[package.dependencies] +colorama = {version = ">=0.4.1", markers = "platform_system == \"Windows\""} +filelock = ">=3.0.0" +packaging = ">=14" +pluggy = ">=0.12.0" +py = ">=1.4.17" +six = ">=1.14.0" +toml = ">=0.9.4" +virtualenv = ">=16.0.0,<20.0.0 || >20.0.0,<20.0.1 || >20.0.1,<20.0.2 || >20.0.2,<20.0.3 || >20.0.3,<20.0.4 || >20.0.4,<20.0.5 || >20.0.5,<20.0.6 || >20.0.6,<20.0.7 || >20.0.7" + +[package.extras] +docs = ["pygments-github-lexers (>=0.0.5)", "sphinx (>=2.0.0)", "sphinxcontrib-autoprogram (>=0.1.5)", "towncrier (>=18.5.0)"] +testing = ["flaky (>=3.4.0)", "freezegun (>=0.3.11)", "pytest (>=4.0.0)", "pytest-cov (>=2.5.1)", "pytest-mock (>=1.10.0)", "pytest-randomly (>=1.0.0)", "psutil (>=5.6.1)", "pathlib2 (>=2.3.3)"] + +[[package]] +name = "typing-extensions" +version = "4.3.0" +description = "Backported and Experimental Type Hints for Python 3.7+" +category = "dev" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "urllib3" +version = "1.26.10" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4" + +[package.extras] +brotli = ["brotlicffi (>=0.8.0)", "brotli (>=1.0.9)", "brotlipy (>=0.6.0)"] +secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] +socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] + +[[package]] +name = "virtualenv" +version = "20.15.1" +description = "Virtual Python Environment builder" +category = "dev" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" + +[package.dependencies] +distlib = ">=0.3.1,<1" +filelock = ">=3.2,<4" +platformdirs = ">=2,<3" +six = ">=1.9.0,<2" + +[package.extras] +docs = ["proselint (>=0.10.2)", "sphinx (>=3)", "sphinx-argparse (>=0.2.5)", "sphinx-rtd-theme (>=0.4.3)", "towncrier (>=21.3)"] +testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "pytest (>=4)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)", "packaging (>=20.0)"] + +[[package]] +name = "zipp" +version = "3.8.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)"] + +[metadata] +lock-version = "1.1" +python-versions = "^3.8" +content-hash = "16ab09eb70efbe3fc8d946786d03da08acc22c6a1fc2101838e6f04ea27a4c73" + +[metadata.files] +alabaster = [ + {file = "alabaster-0.7.12-py2.py3-none-any.whl", hash = "sha256:446438bdcca0e05bd45ea2de1668c1d9b032e1a9154c2c259092d77031ddd359"}, + {file = "alabaster-0.7.12.tar.gz", hash = "sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02"}, 
+] +atomicwrites = [] +attrs = [ + {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"}, + {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"}, +] +babel = [] +certifi = [ + {file = "certifi-2022.6.15-py3-none-any.whl", hash = "sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412"}, + {file = "certifi-2022.6.15.tar.gz", hash = "sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d"}, +] +cfgv = [ + {file = "cfgv-3.3.1-py2.py3-none-any.whl", hash = "sha256:c6a0883f3917a037485059700b9e75da2464e6c27051014ad85ba6aaa5884426"}, + {file = "cfgv-3.3.1.tar.gz", hash = "sha256:f5a830efb9ce7a445376bb66ec94c638a9787422f96264c98edc6bdeed8ab736"}, +] +charset-normalizer = [] +colorama = [] +cycler = [] +distlib = [ + {file = "distlib-0.3.4-py2.py3-none-any.whl", hash = "sha256:6564fe0a8f51e734df6333d08b8b94d4ea8ee6b99b5ed50613f731fd4089f34b"}, + {file = "distlib-0.3.4.zip", hash = "sha256:e4b58818180336dc9c529bfb9a0b58728ffc09ad92027a3f30b7cd91e3458579"}, +] +docutils = [ + {file = "docutils-0.17.1-py2.py3-none-any.whl", hash = "sha256:cf316c8370a737a022b72b56874f6602acf974a37a9fba42ec2876387549fc61"}, + {file = "docutils-0.17.1.tar.gz", hash = "sha256:686577d2e4c32380bb50cbb22f575ed742d58168cee37e99117a854bcd88f125"}, +] +filelock = [ + {file = "filelock-3.7.1-py3-none-any.whl", hash = "sha256:37def7b658813cda163b56fc564cdc75e86d338246458c4c28ae84cabefa2404"}, + {file = "filelock-3.7.1.tar.gz", hash = "sha256:3a0fd85166ad9dbab54c9aec96737b744106dc5f15c0b09a6744a445299fcf04"}, +] +fonttools = [] +identify = [ + {file = "identify-2.5.1-py2.py3-none-any.whl", hash = "sha256:0dca2ea3e4381c435ef9c33ba100a78a9b40c0bab11189c7cf121f75815efeaa"}, + {file = "identify-2.5.1.tar.gz", hash = "sha256:3d11b16f3fe19f52039fb7e39c9c884b21cb1b586988114fbe42671f03de3e82"}, +] +idna = [ + {file = "idna-3.3-py3-none-any.whl", hash 
= "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"}, + {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"}, +] +imagesize = [] +importlib-metadata = [] +iniconfig = [ + {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, + {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, +] +jinja2 = [ + {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, + {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, +] +kiwisolver = [] +markupsafe = [ + {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e"}, + {file = 
"MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-win32.whl", hash = "sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-win32.whl", hash = "sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452"}, + {file = 
"MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-win32.whl", hash = "sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-win32.whl", hash = "sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247"}, + {file = "MarkupSafe-2.1.1.tar.gz", hash = "sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b"}, +] +matplotlib = [] +nodeenv = [] +numpy = [] +packaging = [ + {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, + {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, +] +pillow = [] +platformdirs = [ + {file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"}, + {file = "platformdirs-2.5.2.tar.gz", hash = "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"}, +] +pluggy = [ + {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, + {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, +] +pre-commit = [ + {file = 
"pre_commit-2.19.0-py2.py3-none-any.whl", hash = "sha256:10c62741aa5704faea2ad69cb550ca78082efe5697d6f04e5710c3c229afdd10"}, + {file = "pre_commit-2.19.0.tar.gz", hash = "sha256:4233a1e38621c87d9dda9808c6606d7e7ba0e087cd56d3fe03202a01d2919615"}, +] +py = [ + {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, + {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, +] +pygments = [ + {file = "Pygments-2.12.0-py3-none-any.whl", hash = "sha256:dc9c10fb40944260f6ed4c688ece0cd2048414940f1cea51b8b226318411c519"}, + {file = "Pygments-2.12.0.tar.gz", hash = "sha256:5eb116118f9612ff1ee89ac96437bb6b49e8f04d8a13b514ba26f620208e26eb"}, +] +pyparsing = [ + {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"}, + {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"}, +] +pytest = [ + {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, + {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, +] +python-dateutil = [ + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, +] +pytz = [ + {file = "pytz-2022.1-py2.py3-none-any.whl", hash = "sha256:e68985985296d9a66a881eb3193b0906246245294a881e7c8afe623866ac6a5c"}, + {file = "pytz-2022.1.tar.gz", hash = "sha256:1e760e2fe6a8163bc0b3d9a19c4f84342afa0a2affebfaa84b01b978a02ecaa7"}, +] +pyyaml = [ + {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, + {file = 
"PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"}, + {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"}, + {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"}, + {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"}, + {file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"}, + {file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"}, + {file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"}, + {file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"}, + {file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"}, + {file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"}, + {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"}, + {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"}, + {file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"}, + {file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash 
= "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"}, + {file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"}, + {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, + {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, +] +requests = [] +setuptools-scm = [] +six = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] +snowballstemmer = [ + {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, + {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, +] +sphinx = [ + {file = "Sphinx-4.5.0-py3-none-any.whl", hash = "sha256:ebf612653238bcc8f4359627a9b7ce44ede6fdd75d9d30f68255c7383d3a6226"}, + {file = "Sphinx-4.5.0.tar.gz", hash = "sha256:7bf8ca9637a4ee15af412d1a1d9689fec70523a68ca9bb9127c2f3eeb344e2e6"}, +] +sphinx-rtd-theme = [ + {file = "sphinx_rtd_theme-1.0.0-py2.py3-none-any.whl", hash = "sha256:4d35a56f4508cfee4c4fb604373ede6feae2a306731d533f409ef5c3496fdbd8"}, + {file = 
"sphinx_rtd_theme-1.0.0.tar.gz", hash = "sha256:eec6d497e4c2195fa0e8b2016b337532b8a699a68bcb22a512870e16925c6a5c"}, +] +sphinxcontrib-applehelp = [ + {file = "sphinxcontrib-applehelp-1.0.2.tar.gz", hash = "sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58"}, + {file = "sphinxcontrib_applehelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a"}, +] +sphinxcontrib-devhelp = [ + {file = "sphinxcontrib-devhelp-1.0.2.tar.gz", hash = "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"}, + {file = "sphinxcontrib_devhelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e"}, +] +sphinxcontrib-htmlhelp = [ + {file = "sphinxcontrib-htmlhelp-2.0.0.tar.gz", hash = "sha256:f5f8bb2d0d629f398bf47d0d69c07bc13b65f75a81ad9e2f71a63d4b7a2f6db2"}, + {file = "sphinxcontrib_htmlhelp-2.0.0-py2.py3-none-any.whl", hash = "sha256:d412243dfb797ae3ec2b59eca0e52dac12e75a241bf0e4eb861e450d06c6ed07"}, +] +sphinxcontrib-jsmath = [ + {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, + {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, +] +sphinxcontrib-qthelp = [ + {file = "sphinxcontrib-qthelp-1.0.3.tar.gz", hash = "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72"}, + {file = "sphinxcontrib_qthelp-1.0.3-py2.py3-none-any.whl", hash = "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6"}, +] +sphinxcontrib-serializinghtml = [ + {file = "sphinxcontrib-serializinghtml-1.1.5.tar.gz", hash = "sha256:aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952"}, + {file = "sphinxcontrib_serializinghtml-1.1.5-py2.py3-none-any.whl", hash = "sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd"}, +] +toml = [ + {file 
= "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +] +tomli = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] +tox = [] +typing-extensions = [] +urllib3 = [] +virtualenv = [] +zipp = [ + {file = "zipp-3.8.0-py3-none-any.whl", hash = "sha256:c4f6e5bbf48e74f7a38e7cc5b0480ff42b0ae5178957d564d18932525d5cf099"}, + {file = "zipp-3.8.0.tar.gz", hash = "sha256:56bf8aadb83c24db6c4b577e13de374ccfb67da2078beba1d037c17980bf43ad"}, +] diff --git a/publish.py b/publish.py deleted file mode 100755 index 93c50f5..0000000 --- a/publish.py +++ /dev/null @@ -1,279 +0,0 @@ -""" -required packages -numpy -(numba) - - -these packages have to be installed in virtual environment in use: - -right python version! (will influence the tox environments!) -for testing: -conda install pytest -conda install isort -conda install twine -conda install black -conda install -c conda-forge pre-commit - -PRE COMMIT COMMANDS -pre-commit run --all-files -pre-commit run -pre-commit autoupdate - -pip install rstcheck pip-tools - -rstcheck>=3.3.1 -twine for uploading securely - -documentation generation: -conda install sphinx -https://docs.readthedocs.io/en/stable/intro/getting-started-with-sphinx.html - -pip install sphinx_rtd_theme - -Use the Makefile to build the docs, like so: -cd ./docs -make html - - ---cov-config=tox.ini - -pip-tools package: -TODO write bash script for this -its important to pin requirements to get reproducible errors! 
-compile a new requirements file (with the latest versions) - -source activate tzEnv -pip-compile --upgrade -same as?!: -pip-compile --output-file requirements_tests.txt requirements_tests.in -only update the flask package: -pip-compile --upgrade-package flask -compile a new requirements file (with versions currently used in the virtual env ) -pip-compile --generate-hashes requirements_numba.in - -do NOT sync. will install ONLY the packages specified! (tox etc. would not be installed any more!) -pip-sync - -commands -tox -r to rebuild your tox virtualenvs when you've made changes to requirements setup -# rstcheck will complain about non referenced hyperlinks in doc .rst files! (cannot detect cross file references!) -rstcheck *.rst -tox -r -e codestyle -tox -r -e py37 -tox -r -e py37-numba - -automatically update imports: isort -rc . -dont use for playground.py - - -Use the Makefile to build the docs, like so: -cd ./docs -make html -# for online build of docs, release tag must be created! - -use bandit to check for vulnerabilities: - -conda install bandit -bandit ./timezonefinder/*.py -""" - -import os -import re -import sys -from os.path import abspath, isfile, join, pardir - -PACKAGE = "extremitypathfinder" -VERSION_FILE = "VERSION" -VIRT_ENV_NAME = "pathEnv" -VIRT_ENV_COMMAND = ( - f". 
~/miniconda3/etc/profile.d/conda.sh; conda activate {VIRT_ENV_NAME}; " -) -# TODO '36', -PY_VERSION_IDS = ["37", "38"] # the supported python versions to create wheels for -PYTHON_TAG = ".".join([f"py{v}" for v in PY_VERSION_IDS]) - - -def get_version(): - return open(VERSION_FILE, "r").read().strip() - - -def parse_version(new_version_input="", old_version_str="1.0.0"): - new_version_input = re.search(r"\d\.\d\.\d+", new_version_input) - - if new_version_input is None: - raise ValueError # will cause new input request - else: - new_version_input = new_version_input.group() - - # print(new_version_input) - - split_new_version = [int(x) for x in new_version_input.split(".")] - # print(split_new_version) - split_old_version = [int(x) for x in old_version_str.split(".")] - # print(split_old_version) - - for i in range(3): - if split_new_version[i] > split_old_version[i]: - break - if split_new_version[i] < split_old_version[i]: - raise ValueError # will cause new input request - - return new_version_input - - -def set_version(new_version_str): - with open(VERSION_FILE, "w") as version_file: - version_file.write(new_version_str) - - -def routine(command=None, message="", option1="next", option2="exit"): - while 1: - print(message) - if command: - print("running command:", command) - os.system(command) - - print("__________\nDone. Options:") - print("1)", option1) - print("2)", option2) - print("anything else to repeat this step.") - try: - inp = int(input()) - - if inp == 1: - print("==============") - break - if inp == 2: - sys.exit() - - except ValueError: - pass - print("================") - - -if __name__ == "__main__": - - print('Do you want to switch to the "dev" branch? 
Commit before switching branch!') - print("1) yes, change now.") - print("2) no, exit") - print("anything else skip.") - try: - inp = int(input()) - if inp == 1: - os.system("git checkout dev") - print("==============") - if inp == 2: - sys.exit() - except ValueError: - pass - - os.system("pre-commit autoupdate") - routine("pre-commit run --all-files", "running all pre-commit hooks") - - old_version = get_version() - - print("The actual version number is:", old_version) - print("Enter new version number:") - version_input = None - while 1: - try: - version_input = input() - version_str = parse_version(version_input, old_version) - set_version(version_str) - break - except ValueError: - print( - f'Invalid version input. Should be of format "x.x.xxx" and higher than the old version {old_version}.' - ) - pass # try again - - version = get_version() - print("the version number has been set to:", version) - print("=====================") - - routine( - None, - "Remember to properly specify all supported python versions in publish.py and setup.py", - ) - routine( - None, - "Maybe re-pin the test dependencies (requirements.txt) with pip-compile!" - " Commands are written in the beginning of this script", - ) - routine( - None, - "Have all pinned dependencies been listed in setup.py and the Documentation?", - ) - routine(None, "Have all (new) features been documented?") - routine(None, f"Remember to write a changelog now for version {version}") - - print("___________") - print("Running TESTS:") - - routine( - f"{VIRT_ENV_COMMAND} rstcheck *.rst", - "checking syntax of all .rst files:", - "next: build check", - ) - - print("generating documentation now...") - os.system("(cd ./docs && exec make html)") - print("done.") - - # IMPORTANT: -r flag to rebuild tox virtual env - # only when dependencies have changed! 
- rebuild_flag = "" - print( - "when the dependencies (in requirements_docs.txt) have changed enter 1 (-> rebuild tox)" - ) - try: - inp = int(input()) - if inp == 1: - rebuild_flag = "-r" - except ValueError: - pass - - routine(f"{VIRT_ENV_COMMAND} tox {rebuild_flag} -e py37", "run tests") - print("Tests finished.") - - routine( - None, - "Please commit your changes, push and wait if Travis tests build successfully. " - "Only then merge them into the master.", - "CI tests passed & merge into master complete. Build and upload now.", - ) - - print("=================") - print("PUBLISHING:") - - # routine("python3 setup.py sdist bdist_wheel upload", 'Uploading the package now.') # deprecated - # new twine publishing routine: - # https://packaging.python.org/tutorials/packaging-projects/ - # delete the build folder before to get a fresh build - routine( - f"rm -r -f build; python setup.py sdist bdist_wheel --python-tag {PYTHON_TAG}", - "building the package now.", - "build done. check the included files! test uploading.", - ) - - path = abspath(join(__file__, pardir, "dist")) - all_archives_this_version = [ - f for f in os.listdir(path) if isfile(join(path, f)) and version_str in f - ] - paths2archives = [abspath(join(path, f)) for f in all_archives_this_version] - command = "twine upload --repository-url https://test.pypi.org/legacy/ " + " ".join( - paths2archives - ) - - # upload all archives of this version - routine(VIRT_ENV_COMMAND + command, "testing if upload works.") - - command = "twine upload " + " ".join(paths2archives) - routine(VIRT_ENV_COMMAND + command, "real upload to PyPI.") - - # tag erstellen - routine(None, "Do you want to create a git release tag?", "Yes", "No") - routine( - f"git tag -a v{version} -m 'Version {version}'; git push --tags", "Creating tag" - ) - print(f"______________\nCongrats! 
Published version {version}.") diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..b5a65e3 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,56 @@ +[tool.poetry] +name = "extremitypathfinder" +version = "2.2.1" +license = "MIT" +readme = "README.rst" +repository = "https://github.com/jannikmi/extremitypathfinder" +homepage = "https://extremitypathfinder.readthedocs.io/en/latest/" +documentation = "https://extremitypathfinder.readthedocs.io/en/latest/" +keywords = ["path-planning", "path-finding", "shortest-path", "visibility", "graph", "polygon", "grid", "map", "robotics", "navigation", "offline"] +classifiers = [ + "Development Status :: 5 - Production/Stable", + "Intended Audience :: Developers", + "Intended Audience :: Information Technology", + "Intended Audience :: Science/Research", + "Natural Language :: English", + "Operating System :: OS Independent", + "Topic :: Scientific/Engineering", + "Topic :: Scientific/Engineering :: Artificial Intelligence", + "Topic :: Education", + "Topic :: Games/Entertainment" +] +description = "python package implementing a multivariate Horner scheme for efficiently evaluating multivariate polynomials" +authors = ["jannikmi "] +include = [ + ".editorconfig", + ".pre-commit-config.yaml", + "CHANGELOG.rst", + "CONTRIBUTING.rst", + "LICENSE", + "Makefile", + "README.rst", + "tox.ini", + "tests/*.py", + "example.json", +] +#exclude = ["my_package/excluded.py"] + +[tool.poetry.scripts] +extremitypathfinder = "extremitypathfinder.command_line:main" + +[tool.poetry.dependencies] +python = "^3.8" +numpy = "^1.22" + +[tool.poetry.dev-dependencies] +pytest = "^6.2.5" +tox = "^3.24.4" +pre-commit = "^2.15.0" +# docs +Sphinx = "^4.3.1" +sphinx-rtd-theme = "^1.0.0" +matplotlib = "^3.5.2" + +[build-system] +requires = ["poetry-core>=1.0.7", "poetry==1.1.11"] +build-backend = "poetry.core.masonry.api" diff --git a/readthedocs.yml b/readthedocs.yml index 7462ec9..4505802 100644 --- a/readthedocs.yml +++ 
b/readthedocs.yml @@ -14,6 +14,6 @@ formats: all # Optionally set the version of Python and requirements required to build your docs python: - version: 3.7 + version: 3.8 install: - - requirements: docs/requirements_docs.txt + - requirements: docs/requirements.txt diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index 267f350..0000000 --- a/requirements.txt +++ /dev/null @@ -1,4 +0,0 @@ -# basic dependencies: -# python3.7+ -numpy>=1.16 -matplotlib diff --git a/requirements_tests.in b/requirements_tests.in deleted file mode 100755 index 17456dd..0000000 --- a/requirements_tests.in +++ /dev/null @@ -1,17 +0,0 @@ -# Test dependencies -# will be pinned by pip-compile in 'requirements_tests.txt' -# python3.7+ - -sphinx_rtd_theme - -numpy>=1.16 -matplotlib - -pytest -pytest-cov - -# fix vulnerabilities: -py>=1.10.0 -pygments>=2.7.4 -urllib3>=1.26.4 -jinja2>=2.11.3 diff --git a/requirements_tests.txt b/requirements_tests.txt deleted file mode 100644 index 6af5de4..0000000 --- a/requirements_tests.txt +++ /dev/null @@ -1,106 +0,0 @@ -# -# This file is autogenerated by pip-compile -# To update, run: -# -# pip-compile --output-file=requirements_tests.txt requirements_tests.in -# -alabaster==0.7.12 - # via sphinx -attrs==19.3.0 - # via pytest -babel==2.9.1 - # via sphinx -certifi==2020.12.5 - # via requests -chardet==4.0.0 - # via requests -coverage==5.0.3 - # via pytest-cov -cycler==0.10.0 - # via matplotlib -docutils==0.16 - # via sphinx -idna==2.10 - # via requests -imagesize==1.2.0 - # via sphinx -jinja2==2.11.3 - # via - # -r requirements_tests.in - # sphinx -kiwisolver==1.3.1 - # via matplotlib -markupsafe==1.1.1 - # via jinja2 -matplotlib==3.3.3 - # via -r requirements_tests.in -more-itertools==8.2.0 - # via pytest -numpy==1.22.0 - # via - # -r requirements_tests.in - # matplotlib -packaging==20.3 - # via - # pytest - # sphinx -pillow==9.0.1 - # via matplotlib -pluggy==0.13.1 - # via pytest -py==1.10.0 - # via - # -r requirements_tests.in - # 
pytest -pygments==2.9.0 - # via - # -r requirements_tests.in - # sphinx -pyparsing==2.4.6 - # via - # matplotlib - # packaging -pytest==5.4.1 - # via - # -r requirements_tests.in - # pytest-cov -pytest-cov==2.8.1 - # via -r requirements_tests.in -python-dateutil==2.8.1 - # via matplotlib -pytz==2020.5 - # via babel -requests==2.25.1 - # via sphinx -six==1.14.0 - # via - # cycler - # packaging - # python-dateutil -snowballstemmer==2.1.0 - # via sphinx -sphinx==3.4.3 - # via sphinx-rtd-theme -sphinx-rtd-theme==0.5.1 - # via -r requirements_tests.in -sphinxcontrib-applehelp==1.0.2 - # via sphinx -sphinxcontrib-devhelp==1.0.2 - # via sphinx -sphinxcontrib-htmlhelp==1.0.3 - # via sphinx -sphinxcontrib-jsmath==1.0.1 - # via sphinx -sphinxcontrib-qthelp==1.0.3 - # via sphinx -sphinxcontrib-serializinghtml==1.1.4 - # via sphinx -urllib3==1.26.5 - # via - # -r requirements_tests.in - # requests -wcwidth==0.1.8 - # via pytest - -# The following packages are considered to be unsafe in a requirements file: -# setuptools diff --git a/runtests.py b/runtests.py deleted file mode 100755 index 53ba5ac..0000000 --- a/runtests.py +++ /dev/null @@ -1,16 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -from __future__ import absolute_import, division, print_function, unicode_literals - -import sys - -import pytest - - -def main(): - sys.path.insert(0, "tests") - return pytest.main() - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/setup.cfg b/setup.cfg deleted file mode 100755 index 71c8fe0..0000000 --- a/setup.cfg +++ /dev/null @@ -1,47 +0,0 @@ -[metadata] -version = file: VERSION -long_description = file: README.rst -long_description_content_type = text/x-rst -license_file = LICENSE - -[bdist_wheel] -; does not run on Python 2 and 3 -universal = 0 - -[isort] -;https://github.com/timothycrosley/isort/wiki/isort-Settings -include_trailing_comma = True -known_first_party = extremitypathfinder -known_third_party = helpers,matplotlib,numpy,pytest,setuptools 
-line_length = 120 -multi_line_output = 5 -balanced_wrapping = True -use_parentheses = True - -[coverage:run] -branch = True -include = */extremitypathfinder/* - -[coverage:paths] -source = - extremitypathfinder - .tox/*/site-packages - -[coverage:report] -show_missing = True - -[flake8] -max_line_length = 120 -;trailing comma -ignore = C819 - -[tool:multilint] -paths = extremitypathfinder - setup.py - tests - -[tool:pytest] -; TODO make it work -testpaths = tests -addopts = --cov=extremitypathfinder - --cov-report term-missing diff --git a/setup.py b/setup.py deleted file mode 100755 index 69384ab..0000000 --- a/setup.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding:utf-8 -*- - -from setuptools import setup - -setup( - name="extremitypathfinder", - packages=["extremitypathfinder"], - description="python package for geometric shortest path computation for given 2D multi-polygon maps", - # version: in VERSION file https://packaging.python.org/guides/single-sourcing-package-version/ - # With this approach you must make sure that the VERSION file is included in all your source - # and binary distributions (e.g. add include VERSION to your MANIFEST.in). 
- author="Jannik Michelfeit", - author_email="python@michelfe.it", - license="MIT licence", - url="https://github.com/jannikmi/extremitypathfinder", # use the URL to the github repo - project_urls={ - "Source Code": "https://github.com/jannikmi/extremitypathfinder", - "Documentation": "https://github.com/jannikmi/extremitypathfinder/blob/master/README.rst", - "Changelog": "https://github.com/jannikmi/extremitypathfinder/blob/master/CHANGELOG.rst", - }, - keywords="path-planning path-finding shortest-path visibility graph visibility-graph polygon" - "robotics navigation offline ", - classifiers=[ - "Development Status :: 5 - Production/Stable", - "Intended Audience :: Developers", - "Intended Audience :: Information Technology", - "Intended Audience :: Science/Research", - "License :: OSI Approved :: MIT License", - "Operating System :: OS Independent", - "Programming Language :: Python", - "Programming Language :: Python :: 3 :: Only", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.6", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Topic :: Scientific/Engineering", - "Topic :: Scientific/Engineering :: Artificial Intelligence", - ], - long_description="python package for fast geometric shortest path computation in 2D multi-polygon " - "or grid environments based on visibility graphs." 
- "Please refer to the `documentation `__.", - python_requires=">=3.7", - install_requires=[ - "numpy>=1.16", - ], - entry_points={ - "console_scripts": [ - "extremitypathfinder=extremitypathfinder.command_line:main" - ], - }, -) diff --git a/tests/cli_test.py b/tests/cli_test.py new file mode 100644 index 0000000..5ef684c --- /dev/null +++ b/tests/cli_test.py @@ -0,0 +1,24 @@ +import subprocess +from pathlib import Path +from typing import List + +import pytest + +PROJECT_DIR = Path(__file__).parent.parent +EXAMPLE_FILE = PROJECT_DIR / "example.json" + + +@pytest.mark.parametrize( + "cmd", + [ + f"extremitypathfinder {EXAMPLE_FILE} -s 2.5 3.2 -g 7.9 6.8", + ], +) +def test_main(cmd: List[str]): + res = subprocess.getoutput(cmd) + assert not res.endswith("command not found"), "package not installed" + splits = res.split(" ") + length = float(splits[-1]) + print("length:", length) + path = " ".join(splits[:-1]) + print("path:", path) diff --git a/tests/helper_fcts_test.py b/tests/helper_fcts_test.py index f8a99ba..2e1deed 100755 --- a/tests/helper_fcts_test.py +++ b/tests/helper_fcts_test.py @@ -6,11 +6,7 @@ from extremitypathfinder import PolygonEnvironment from extremitypathfinder.helper_classes import AngleRepresentation -from extremitypathfinder.helper_fcts import ( - has_clockwise_numbering, - inside_polygon, - read_json, -) +from extremitypathfinder.helper_fcts import has_clockwise_numbering, inside_polygon, read_json # TODO test find_visible(), ... 
@@ -39,9 +35,7 @@ def test_inside_polygon(self): for border_value in [True, False]: def test_fct(input): - polygon_test_case = np.array( - [(-1.0, -1.0), (1.0, -1.0), (1.0, 1.0), (-1.0, 1.0)] - ) + polygon_test_case = np.array([(-1.0, -1.0), (1.0, -1.0), (1.0, 1.0), (-1.0, 1.0)]) x, y = input return inside_polygon(x, y, polygon_test_case, border_value) diff --git a/tests/helpers.py b/tests/helpers.py index 68384a5..60c0e78 100755 --- a/tests/helpers.py +++ b/tests/helpers.py @@ -3,9 +3,5 @@ def proto_test_case(data, fct): # print(input, expected_output, fct(input)) actual_output = fct(input) if actual_output != expected_output: - print( - "input: {} expected: {} got: {}".format( - input, expected_output, actual_output - ) - ) + print("input: {} expected: {} got: {}".format(input, expected_output, actual_output)) assert actual_output == expected_output diff --git a/tests/main_test.py b/tests/main_test.py index ac1c0ef..d86edcf 100755 --- a/tests/main_test.py +++ b/tests/main_test.py @@ -94,24 +94,24 @@ (((5, 5), (5, 7)), ([(5, 5), (5, 7)], 2.0)), # should have direct connection to all visible extremities! connected in graph (((6, 8), (5, 7)), ([(6, 8), (5, 7)], sqrt(2))), - (((4, 1), (5, 7)), ([(4, 1), (5, 7)], sqrt(1 ** 2 + 6 ** 2))), + (((4, 1), (5, 7)), ([(4, 1), (5, 7)], sqrt(1**2 + 6**2))), # should have direct connection to all visible extremities! even if not connected in graph! - (((4, 2), (5, 7)), ([(4, 2), (5, 7)], sqrt(1 ** 2 + 5 ** 2))), + (((4, 2), (5, 7)), ([(4, 2), (5, 7)], sqrt(1**2 + 5**2))), # mix of edges and vertices, directly visible - (((2, 2), (5, 7)), ([(2, 2), (5, 7)], sqrt(3 ** 2 + 5 ** 2))), + (((2, 2), (5, 7)), ([(2, 2), (5, 7)], sqrt(3**2 + 5**2))), # also regular points should have direct connection to all visible extremities! 
- (((10, 3), (17, 6)), ([(10, 3), (17, 6)], sqrt(7 ** 2 + 3 ** 2))), - (((10, 3), (8, 8)), ([(10, 3), (8, 8)], sqrt(2 ** 2 + 5 ** 2))), + (((10, 3), (17, 6)), ([(10, 3), (17, 6)], sqrt(7**2 + 3**2))), + (((10, 3), (8, 8)), ([(10, 3), (8, 8)], sqrt(2**2 + 5**2))), # even if the query point lies in front of an extremity! (test if new query vertices are being created!) - (((10, 3), (8, 5)), ([(10, 3), (8, 5)], sqrt(2 ** 2 + 2 ** 2))), + (((10, 3), (8, 5)), ([(10, 3), (8, 5)], sqrt(2**2 + 2**2))), # using a* graph search: # directly reachable through a single vertex (does not change distance!) - (((5, 1), (3, 3)), ([(5, 1), (4, 2), (3, 3)], sqrt(2 ** 2 + 2 ** 2))), + (((5, 1), (3, 3)), ([(5, 1), (4, 2), (3, 3)], sqrt(2**2 + 2**2))), # If two Polygons have vertices with identical coordinates (this is allowed), # paths through these vertices are theoretically possible! ( ((6.5, 5.5), (7.5, 6.5)), - ([(6.5, 5.5), (7, 6), (7.5, 6.5)], sqrt(1 ** 2 + 1 ** 2)), + ([(6.5, 5.5), (7, 6), (7.5, 6.5)], sqrt(1**2 + 1**2)), ), # distance should stay the same even if multiple extremities lie on direct path # test if path is skipping passed extremities @@ -175,7 +175,7 @@ (((1, 0), (5, 0)), ([(1, 0), (5, 0)], 4.0)), # both # on edge of hole (((4, 8), (3, 8)), ([(4, 8), (3, 8)], 1.0)), - (((4, 8), (4.1, 8.1)), ([(4, 8), (4.1, 8.1)], sqrt(2 * (0.1 ** 2)))), # both + (((4, 8), (4.1, 8.1)), ([(4, 8), (4.1, 8.1)], sqrt(2 * (0.1**2)))), # both # on vertex (((9, 5), (8, 5)), ([(9, 5), (8, 5)], 1.0)), # on vertex of hole @@ -183,11 +183,11 @@ # on two vertices # coinciding with edge (direct neighbour) (((3, 7), (5, 9)), ([(3, 7), (5, 9)], sqrt(8))), - (((4.6, 7), (5, 9)), ([(4.6, 7), (5, 9)], sqrt((0.4 ** 2) + (2 ** 2)))), + (((4.6, 7), (5, 9)), ([(4.6, 7), (5, 9)], sqrt((0.4**2) + (2**2)))), # should have direct connection to all visible extremities! connected in graph (((5, 4), (5, 9)), ([(5, 4), (5, 9)], 5)), # should have a direct connection to all visible extremities! 
even if not connected in graph! - (((9, 5), (5, 9)), ([(9, 5), (5, 9)], sqrt(2 * (4 ** 2)))), + (((9, 5), (5, 9)), ([(9, 5), (5, 9)], sqrt(2 * (4**2)))), # using a* graph search: # directly reachable through a single vertex (does not change distance!) (((9, 4), (9, 6)), ([(9, 4), (9, 5), (9, 6)], 2)), @@ -268,9 +268,7 @@ SEPARATED_ENV = ( [(5, 5), (-5, 5), (-5, -5), (5, -5)], - [ - [(-5.1, 1), (-5.1, 2), (5.1, 2), (5.1, 1)] - ], # intersecting polygons -> no path possible + [[(-5.1, 1), (-5.1, 2), (5.1, 2), (5.1, 1)]], # intersecting polygons -> no path possible # [[(-5, 1), (-5, 2), (5, 2), (5, 1)]], # hole lies on the edges -> path possible ) @@ -292,18 +290,14 @@ def validate(start_coordinates, goal_coordinates, expected_output): if expected_length is None: correct_result = length is None and path == expected_path else: - correct_result = path == expected_path and length == pytest.approx( - expected_length - ) + correct_result = path == expected_path and length == pytest.approx(expected_length) if correct_result: status_str = "OK" else: status_str = "XX" print(f"{status_str} input: {(start_coordinates, goal_coordinates)} ") if PLOT_TEST_RESULTS: - assert ( - correct_result - ), f"unexpected result (path, length): got {output} instead of {expected_output} " + assert correct_result, f"unexpected result (path, length): got {output} instead of {expected_output} " print("testing if path and distance are correct:") for ((start_coordinates, goal_coordinates), expected_output) in test_cases: @@ -319,14 +313,10 @@ def test_fct(self): grid_env = ENVIRONMENT_CLASS(**CONSTRUCTION_KWARGS) grid_env.store_grid_world(*GRID_ENV_PARAMS, simplify=False, validate=False) - assert ( - len(list(grid_env.all_extremities)) == 17 - ), "extremities do not get detected correctly!" + assert len(list(grid_env.all_extremities)) == 17, "extremities do not get detected correctly!" 
grid_env.prepare() # raise ValueError - assert ( - len(grid_env.graph.all_nodes) == 16 - ), "identical nodes should get joined in the graph!" + assert len(grid_env.graph.all_nodes) == 16, "identical nodes should get joined in the graph!" # test if points outside the map are being rejected for start_coordinates, goal_coordinates in INVALID_DESTINATION_DATA: @@ -339,9 +329,7 @@ def test_fct(self): # when the deep copy mechanism works correctly # even after many queries the internal graph should have the same structure as before # otherwise the temporarily added vertices during a query stay stored - assert ( - len(grid_env.graph.all_nodes) == 16 - ), "the graph should stay unchanged by shortest path queries!" + assert len(grid_env.graph.all_nodes) == 16, "the graph should stay unchanged by shortest path queries!" nr_nodes_env1_old = len(grid_env.graph.all_nodes) @@ -362,9 +350,7 @@ def test_fct(self): assert ( nr_nodes_env1_new == nr_nodes_env1_old ), "node amount of an grid_env should not change by creating another grid_env!" 
- assert ( - grid_env.graph is not poly_env.graph - ), "different environments share the same graph object" + assert grid_env.graph is not poly_env.graph, "different environments share the same graph object" assert ( grid_env.graph.all_nodes is not poly_env.graph.all_nodes ), "different environments share the same set of nodes" diff --git a/tox.ini b/tox.ini index 851f86f..49b92fa 100755 --- a/tox.ini +++ b/tox.ini @@ -1,25 +1,18 @@ [tox] +isolated_build = true envlist = - py{37,38} ; py{36,37,38} + py{38,39,310} skip_missing_interpreters = {env:TOX_SKIP_MISSING_INTERPRETERS:True} -[testenv] - -basepython = - py36: python3.6 - py37: python3.7 - py38: python3.8 +[gh-actions] +python = + 3.8: py38 + 3.9: py39 + 3.10: py310 -; IMPORTANT: --no-deps ensures ONLY the dependencies given in requirements.txt are being installed -install_command = pip install --no-deps {opts} {packages} -deps = -r{toxinidir}/requirements_tests.txt -;commands = pytest {posargs} -commands = coverage run -p --source=extremitypathfinder ./runtests.py {posargs} - -passenv = -# See https://github.com/codecov/codecov-python/blob/5b9d539a6a09bc84501b381b563956295478651a/README.md#using-tox - codecov: TOXENV - codecov: CI - codecov: TRAVIS TRAVIS_* +[testenv] +allowlist_externals = poetry +commands = + poetry install -v + poetry run pytest {posargs}