Skip to content

Commit

Permalink
Fix lint
Browse files Browse the repository at this point in the history
  • Loading branch information
Sean Sullivan committed Dec 20, 2023
1 parent caa1168 commit ce596cb
Show file tree
Hide file tree
Showing 3 changed files with 14 additions and 14 deletions.
12 changes: 6 additions & 6 deletions elastic_datashader/parameters.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,12 +24,12 @@ class SearchParams(BaseModel):
geopoint_field: str
params: dict
cmap: str = Field(default="bym")
resolution:str = Field(default="finest")
span_range:str = Field(default="auto", alias='span')
spread:str = Field(default="auto") # Point Size
timeOverlap:bool = Field(default=False)
timeOverlapSize:str = Field(default="auto")
timestamp_field:str = Field(default="@timestamp")
resolution: str = Field(default="finest")
span_range: str = Field(default="auto", alias='span')
spread: str = Field(default="auto") # Point Size
timeOverlap: bool = Field(default=False)
timeOverlapSize: str = Field(default="auto")
timestamp_field: str = Field(default="@timestamp")
search_nautical_miles: int = Field(default=50)
geofield_type: str = Field(default='geo_point')
bucket_max: float = Field(default=100, ge=0, le=100)
Expand Down
14 changes: 7 additions & 7 deletions elastic_datashader/routers/tms.py
Original file line number Diff line number Diff line change
Expand Up @@ -282,7 +282,7 @@ def generate_tile_to_cache(idx: str, x: int, y: int, z: int, params, parameter_h
logger.debug("Releasing cache placeholder %s", rendering_tile_name(idx, x, y, z, parameter_hash))
release_cache_placeholder(config.cache_path, rendering_tile_name(idx, x, y, z, parameter_hash))

async def fetch_or_render_tile(already_waited: int, idx: str, x: int, y: int, z: int, request: Request, background_tasks: BackgroundTasks,post_params={}):
async def fetch_or_render_tile(already_waited: int, idx: str, x: int, y: int, z: int, request: Request, background_tasks: BackgroundTasks, post_params={}):
check_proxy_key(request.headers.get('tms-proxy-key'))

es = Elasticsearch(
Expand All @@ -295,7 +295,7 @@ async def fetch_or_render_tile(already_waited: int, idx: str, x: int, y: int, z:
try:
print(request.query_params)
print(post_params)
parameter_hash, params = extract_parameters(request.headers, {**request.query_params,**post_params})
parameter_hash, params = extract_parameters(request.headers, {**request.query_params, **post_params})
# try to build the dsl object bad filters cause exceptions that are then retried.
# underlying elasticsearch_dsl doesn't support the elasticsearch 8 api yet so this causes requests to thrash
# If the filters are bad or elasticsearch_dsl cannot build the request will never be completed so serve X tile
Expand Down Expand Up @@ -350,15 +350,15 @@ async def get_tms_after_wait(already_waited: int, idx: str, x: int, y: int, z: i


@router.post("/{idx}/{z}/{x}/{y}.png")
async def post_tile(already_waited: int, idx: str, x: int, y: int, z: int, request: Request, params: SearchParams, background_tasks: BackgroundTasks):
    """Render or fetch a TMS tile requested via POST.

    The validated ``SearchParams`` body is flattened into the query-style
    dict that ``fetch_or_render_tile`` expects.  If the tile is still being
    rendered, ``fetch_or_render_tile`` returns a ``RedirectResponse``; that
    is translated into a 200 JSON payload carrying the ``retry-after`` hint,
    because POST clients poll rather than follow the redirect.

    :param already_waited: seconds the client has already waited (path param)
    :param idx: index name to render tiles from
    :param x, y, z: TMS tile coordinates
    :param request: incoming request (headers/query params forwarded on)
    :param params: validated search parameters from the POST body
    :param background_tasks: FastAPI task queue used by the renderer
    """
    # Flatten the pydantic model; the nested "params" value is re-serialized
    # to a JSON string so it matches the shape it would have arriving as a
    # GET query parameter.  NOTE(review): .dict() is the pydantic-v1 API —
    # confirm before migrating to v2 (model_dump).
    params = params.dict()
    params["params"] = json.dumps(params["params"])
    response = await fetch_or_render_tile(0, idx, x, y, z, request, background_tasks, post_params=params)
    if isinstance(response, RedirectResponse):
        print(response.headers)  # NOTE(review): leftover debug print — consider logger.debug
        return JSONResponse(status_code=200, content={"retry-after": response.headers['retry-after']})
    return response





2 changes: 1 addition & 1 deletion elastic_datashader/tilegen.py
Original file line number Diff line number Diff line change
Expand Up @@ -969,7 +969,7 @@ def generate_tile(idx, x, y, z, headers, params, tile_width_px=256, tile_height_

# Create base search
base_s = get_search_base(config.elastic_hosts, headers, params, idx)
#base_s = base_s[0:0]
# base_s = base_s[0:0]
# Now find out how many documents
count_s = copy.copy(base_s)[0:0] # slice of array sets from/size since we are aggregating the data we don't need the hits
count_s = count_s.filter("geo_bounding_box", **{geopoint_field: bb_dict})
Expand Down

0 comments on commit ce596cb

Please sign in to comment.