diff --git a/Dockerfile b/Dockerfile
index 96a8cf8..18a66e5 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -11,7 +11,7 @@ COPY elastic_datashader /build/elastic_datashader
 WORKDIR /build/elastic_datashader
 RUN poetry build
 
-FROM python:3.11 AS deployment
+FROM python:3.11-slim AS deployment
 LABEL maintainer="foss@spectric.com"
 RUN useradd -d /home/datashader datashader && \
     mkdir -p /home/datashader /opt/elastic_datashader/tms-cache && \
@@ -23,8 +23,8 @@ COPY --from=builder /build/dist/*.whl /home/datashader/tmp/
 ENV PATH="$PATH:/home/datashader/.local/bin"
 RUN pip install --upgrade pip && \
     pip install --no-cache-dir /home/datashader/tmp/*.whl && \
-    pip install gunicorn==20.1.0 && \
-    pip install uvicorn==0.22.0
+    pip install gunicorn==21.2.0 && \
+    pip install uvicorn==0.24.0
 
 COPY deployment/logging_config.yml /opt/elastic_datashader/
 COPY deployment/gunicorn_config.py /opt/elastic_datashader/
diff --git a/elastic_datashader/main.py b/elastic_datashader/main.py
index 49c2a5d..6cab99f 100644
--- a/elastic_datashader/main.py
+++ b/elastic_datashader/main.py
@@ -1,7 +1,7 @@
 from asyncio import create_task
 
 from fastapi import FastAPI
-
+from fastapi.middleware.cors import CORSMiddleware
 import urllib3
 
 from .cache import background_cache_cleanup
@@ -30,6 +30,16 @@ app.include_router(legend.router)
 app.include_router(tms.router)
 
+origins = ["*"]
+
+app.add_middleware(
+    CORSMiddleware,
+    allow_origins=origins,
+    allow_credentials=True,
+    allow_methods=["*"],
+    allow_headers=["*"],
+)
+
 @app.on_event("startup")
 async def app_startup():
     create_task(background_cache_cleanup())
 
diff --git a/elastic_datashader/parameters.py b/elastic_datashader/parameters.py
index 1aef86c..58ac792 100644
--- a/elastic_datashader/parameters.py
+++ b/elastic_datashader/parameters.py
@@ -12,6 +12,7 @@ from elasticsearch import Elasticsearch
 from elasticsearch.exceptions import NotFoundError, ConflictError
 from elasticsearch_dsl import Document
+from pydantic import BaseModel, Field
 
 from .config import config
 from .elastic import get_search_base, build_dsl_filter
@@ -19,6 +20,21 @@ from .timeutil import quantize_time_range, convert_kibana_time
 
+class SearchParams(BaseModel):
+    geopoint_field: str
+    params: dict
+    cmap: str = Field(default="bym")
+    resolution: str = Field(default="finest")
+    span_range: str = Field(default="auto", alias="span")
+    spread: str = Field(default="auto")  # point size
+    timeOverlap: bool = Field(default=False)
+    timeOverlapSize: str = Field(default="auto")
+    timestamp_field: str = Field(default="@timestamp")
+    search_nautical_miles: int = Field(default=50)
+    geofield_type: str = Field(default="geo_point")
+    bucket_max: float = Field(default=100, ge=0, le=100)
+    bucket_min: float = Field(default=0, ge=0, le=1)
+
 
 def create_default_params() -> Dict[str, Any]:
     return {
         "category_field": None,
@@ -287,7 +303,7 @@ def extract_parameters(headers: Dict[Any, Any], query_params: Dict[Any, Any]) ->
     params["highlight"] = query_params.get("highlight")
     params["spread"] = normalize_spread(query_params.get("spread"))
     params["resolution"] = query_params.get("resolution", params["resolution"])
-    params["use_centroid"] = query_params.get("use_centroid", default=params["use_centroid"])
+    params["use_centroid"] = query_params.get("use_centroid", params["use_centroid"])
     params["cmap"] = get_cmap(query_params.get("cmap", None), category_field)
     params["span_range"] = query_params.get("span", "auto")
     params["geopoint_field"] = query_params.get("geopoint_field", params["geopoint_field"])
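Aside, not part of the patch: a minimal sketch of how the new `SearchParams` model validates a POST body, assuming pydantic v1 semantics (the field values below are invented). Note that `span_range` is populated through its `span` alias, while `.dict()` returns field names rather than aliases:

```python
from elastic_datashader.parameters import SearchParams

# Hypothetical request body; only "geopoint_field" and "params" are required.
body = {"geopoint_field": "location", "params": {"filters": []}, "span": "narrow"}
parsed = SearchParams(**body)
assert parsed.span_range == "narrow"  # populated through the "span" alias
assert parsed.cmap == "bym"           # defaults fill the omitted fields
print(parsed.dict())                  # keys come back as field names, not aliases
```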
diff --git a/elastic_datashader/routers/tms.py b/elastic_datashader/routers/tms.py
index 034d885..23275f6 100644
--- a/elastic_datashader/routers/tms.py
+++ b/elastic_datashader/routers/tms.py
@@ -3,11 +3,12 @@ from typing import Optional
 import time
 import uuid
+import json
 
 from elasticsearch import Elasticsearch
 from elasticsearch.exceptions import NotFoundError
 from elasticsearch_dsl import Document
 from fastapi import APIRouter, BackgroundTasks, HTTPException, Request, Response
-from fastapi.responses import RedirectResponse
+from fastapi.responses import RedirectResponse, JSONResponse
 from starlette.datastructures import URL
 
 from ..cache import (
@@ -26,7 +27,7 @@ from ..drawing import generate_x_tile
 from ..elastic import get_es_headers, get_search_base
 from ..logger import logger
-from ..parameters import extract_parameters, merge_generated_parameters
+from ..parameters import extract_parameters, merge_generated_parameters, SearchParams
 from ..tilegen import (
     TILE_HEIGHT_PX,
     TILE_WIDTH_PX,
@@ -148,8 +149,8 @@ def cached_response(es, idx, x, y, z, params, parameter_hash) -> Optional[Respon
     try:
         es.update(  # pylint: disable=E1123
-            ".datashader_tiles",
-            tile_id(idx, x, y, z, parameter_hash),
+            index=".datashader_tiles",
+            id=tile_id(idx, x, y, z, parameter_hash),
             body={"script" : {"source": "ctx._source.cache_hits++"}},
             retry_on_conflict=5,
         )
@@ -281,7 +282,7 @@
     logger.debug("Releasing cache placeholder %s", rendering_tile_name(idx, x, y, z, parameter_hash))
     release_cache_placeholder(config.cache_path, rendering_tile_name(idx, x, y, z, parameter_hash))
 
-async def fetch_or_render_tile(already_waited: int, idx: str, x: int, y: int, z: int, request: Request, background_tasks: BackgroundTasks):
+async def fetch_or_render_tile(already_waited: int, idx: str, x: int, y: int, z: int, request: Request, background_tasks: BackgroundTasks, post_params=None):
     check_proxy_key(request.headers.get('tms-proxy-key'))
 
     es = Elasticsearch(
@@ -289,10 +290,11 @@ async def fetch_or_render_tile(already_waited: int, idx: str, x: int, y: int, z:
         verify_certs=False,
         timeout=120,
     )
-
+    if post_params is None:
+        post_params = {}
 
     # Get hash and parameters
     try:
-        parameter_hash, params = extract_parameters(request.headers, request.query_params)
+        parameter_hash, params = extract_parameters(request.headers, {**request.query_params, **post_params})
         # try to build the dsl object; bad filters cause exceptions that are then retried.
         # underlying elasticsearch_dsl doesn't support the elasticsearch 8 api yet so this causes requests to thrash
         # If the filters are bad or elasticsearch_dsl cannot build the request will never be completed so serve X tile
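Aside, not part of the patch: the `{**request.query_params, **post_params}` merge above means body-supplied values take precedence over identical query-string keys, because later entries win in a dict-literal merge. A tiny illustration with made-up values:

```python
# Later entries win in a dict-literal merge, so a value posted in the
# body overrides the same key arriving on the query string.
query_params = {"cmap": "bmy", "spread": "auto"}
post_params = {"cmap": "fire"}
merged = {**query_params, **post_params}
assert merged == {"cmap": "fire", "spread": "auto"}
```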
@@ -344,3 +346,13 @@ async def get_tms(idx: str, x: int, y: int, z: int, request: Request, background
 @router.get("/{already_waited}/{idx}/{z}/{x}/{y}.png")
 async def get_tms_after_wait(already_waited: int, idx: str, x: int, y: int, z: int, request: Request, background_tasks: BackgroundTasks):
     return await fetch_or_render_tile(already_waited, idx, x, y, z, request, background_tasks)
+
+
+@router.post("/{idx}/{z}/{x}/{y}.png")
+async def post_tile(idx: str, x: int, y: int, z: int, request: Request, params: SearchParams, background_tasks: BackgroundTasks):
+    post_params = params.dict()
+    post_params["params"] = json.dumps(post_params["params"])
+    response = await fetch_or_render_tile(0, idx, x, y, z, request, background_tasks, post_params=post_params)
+    if isinstance(response, RedirectResponse):
+        return JSONResponse(status_code=200, content={"retry-after": response.headers["retry-after"]})
+    return response
diff --git a/elastic_datashader/tilegen.py b/elastic_datashader/tilegen.py
index dadca76..e644484 100644
--- a/elastic_datashader/tilegen.py
+++ b/elastic_datashader/tilegen.py
@@ -969,7 +969,7 @@ def generate_tile(idx, x, y, z, headers, params, tile_width_px=256, tile_height_
 
     # Create base search
     base_s = get_search_base(config.elastic_hosts, headers, params, idx)
-    base_s = base_s[0:0]
+    # base_s = base_s[0:0]
     # Now find out how many documents
     count_s = copy.copy(base_s)[0:0]  # slice of array sets from/size since we are aggregating the data we don't need the hits
     count_s = count_s.filter("geo_bounding_box", **{geopoint_field: bb_dict})
@@ -1023,7 +1023,7 @@ def generate_tile(idx, x, y, z, headers, params, tile_width_px=256, tile_height_
     geotile_precision = min(max(current_zoom, current_zoom + agg_zooms), MAXIMUM_PERCISION)
 
     tile_s = copy.copy(base_s)
-    tile_s = tile_s.params(size=0, track_total_hits=False)
+    tile_s = tile_s.params(track_total_hits=False)
     tile_s = tile_s.filter(
         "geo_bounding_box", **{geopoint_field: bb_dict}
     )
diff --git a/pyproject.toml b/pyproject.toml
index 731813c..387a309 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -21,9 +21,9 @@ elastic_datashader = "elastic_datashader.cli:main"
 
 [tool.poetry.dependencies]
 python = ">=3.10,<4"
-elasticsearch = "7.17.4"
-elasticsearch-dsl = "7.4.0"
-datashader = "0.15.2"
+elasticsearch = "8.11.1"
+elasticsearch-dsl = "8.11.0"
+datashader = "0.16.0"
 pandas = "^1.5.3"
 colorcet = "^3.0.1"
 mercantile = "1.2.1"
@@ -32,11 +32,11 @@ Pillow = "*"
 pynumeral = "*"
 arrow = "*"
 python-datemath = "*"
-numba = "0.57.0"
+numba = "0.57.1"
 numpy = "^1.23"
 PyYAML = "*"
 humanize = "*"
-uvicorn = {extras = ["standard"], version = "^0.18.2", optional = true}
+uvicorn = {extras = ["standard"], version = "0.24.0", optional = true}
 fastapi = "^0.96"
 georgio = "2023.156.924"
 jinja2 = "3.1.2"
@@ -57,6 +57,7 @@ localwebserver = ["uvicorn"]
 
 [tool.pylint.'MESSAGES CONTROL']
 max-line-length = 150
+extension-pkg-whitelist = "pydantic"
 disable = "too-many-nested-blocks,too-many-branches,too-many-statements,R0801,R0902,R0903,R0911,R0913,R0914,C0103,C0114,C0115,C0116,C0123,C0301,C0302,fixme"
 
 [tool.black]
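Aside, not part of the patch: a hedged sketch of how a client might exercise the new POST tile route. The host, port, route prefix, index name, and field values here are assumptions; the actual prefix depends on how the router is mounted. A JSON response carries the retry hint returned by `post_tile`, anything else is the rendered PNG:

```python
import requests

body = {
    "geopoint_field": "location",  # assumed field name
    "params": {},                  # re-serialized to a JSON string server-side
    "cmap": "bmy",
}
# /{idx}/{z}/{x}/{y}.png -> index "my-index", zoom 3, tile x=2, y=1
resp = requests.post("http://localhost:5000/tms/my-index/3/2/1.png", json=body)
if resp.headers.get("content-type", "").startswith("application/json"):
    print("tile still rendering; retry after", resp.json()["retry-after"])
else:
    with open("tile.png", "wb") as f:
        f.write(resp.content)
```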