diff --git a/.github/workflows/rust-docker-build.yml b/.github/workflows/rust-docker-build.yml index d44e5d847e601..6c97bc8e7731a 100644 --- a/.github/workflows/rust-docker-build.yml +++ b/.github/workflows/rust-docker-build.yml @@ -1,10 +1,11 @@ -name: Build container images +name: Build rust container images on: workflow_dispatch: push: paths: - 'rust/**' + - '.github/workflows/rust-docker-build.yml' branches: - 'master' diff --git a/.github/workflows/rust-hook-migrator-docker.yml b/.github/workflows/rust-hook-migrator-docker.yml index 2dd7c01d015dc..edfeca0bb8734 100644 --- a/.github/workflows/rust-hook-migrator-docker.yml +++ b/.github/workflows/rust-hook-migrator-docker.yml @@ -1,10 +1,11 @@ -name: Build hook-migrator docker image +name: Build rust hook-migrator docker image on: workflow_dispatch: push: paths: - 'rust/**' + - '.github/workflows/rust-hook-migrator-docker.yml' branches: - 'master' @@ -57,7 +58,7 @@ jobs: id: meta uses: docker/metadata-action@v4 with: - images: ghcr.io/posthog/hog-rs/hook-migrator + images: ghcr.io/posthog/posthog/hook-migrator tags: | type=ref,event=pr type=ref,event=branch diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml index c2c379334980e..2c5f40f1afe4c 100644 --- a/.github/workflows/rust.yml +++ b/.github/workflows/rust.yml @@ -1,4 +1,4 @@ -name: Rust +name: Rust test CI on: workflow_dispatch: @@ -31,8 +31,12 @@ jobs: rust: # Avoid running rust tests for irrelevant changes - 'rust/**' + - '.github/workflows/rust.yml' + - '.github/workflows/rust-docker-build.yml' + - '.github/workflows/rust-hook-migrator-docker.yml' build: + name: Build rust services needs: changes runs-on: depot-ubuntu-22.04-4 @@ -68,6 +72,7 @@ jobs: run: cargo build --all --locked --release && find target/release/ -maxdepth 1 -executable -type f | xargs strip test: + name: Test rust services needs: changes runs-on: depot-ubuntu-22.04-4 timeout-minutes: 10 @@ -118,6 +123,7 @@ jobs: run: cargo test --all-features linting: + name: Lint rust services needs: changes runs-on: depot-ubuntu-22.04-4 @@ -163,6 +169,7 @@ jobs: run: cargo check --all-features shear: + name: Shear rust services needs: changes runs-on: depot-ubuntu-22.04-4 diff --git a/frontend/src/queries/schema.json b/frontend/src/queries/schema.json index d4015b9fa212e..696a2354b9131 100644 --- a/frontend/src/queries/schema.json +++ b/frontend/src/queries/schema.json @@ -542,6 +542,7 @@ "type": "boolean" }, "last_refresh": { + "format": "date-time", "type": "string" }, "limit": { @@ -555,6 +556,7 @@ "description": "Modifiers used when performing the query" }, "next_allowed_client_refresh": { + "format": "date-time", "type": "string" }, "offset": { @@ -628,6 +630,7 @@ "type": "boolean" }, "last_refresh": { + "format": "date-time", "type": "string" }, "limit": { @@ -638,6 +641,7 @@ "description": "Modifiers used when performing the query" }, "next_allowed_client_refresh": { + "format": "date-time", "type": "string" }, "offset": { @@ -709,6 +713,7 @@ "type": "boolean" }, "last_refresh": { + "format": "date-time", "type": "string" }, "limit": { @@ -719,6 +724,7 @@ "description": "Modifiers used when performing the query" }, "next_allowed_client_refresh": { + "format": "date-time", "type": "string" }, "offset": { @@ -774,6 +780,7 @@ "type": "boolean" }, "last_refresh": { + "format": "date-time", "type": "string" }, "modifiers": { @@ -781,6 +788,7 @@ "description": "Modifiers used when performing the query" }, "next_allowed_client_refresh": { + "format": "date-time", "type": "string" }, "query_status": { @@ 
-861,6 +869,7 @@ "type": "boolean" }, "last_refresh": { + "format": "date-time", "type": "string" }, "limit": { @@ -875,6 +884,7 @@ "description": "Modifiers used when performing the query" }, "next_allowed_client_refresh": { + "format": "date-time", "type": "string" }, "offset": { @@ -1010,9 +1020,11 @@ "type": "boolean" }, "last_refresh": { + "format": "date-time", "type": "string" }, "next_allowed_client_refresh": { + "format": "date-time", "type": "string" }, "query_status": { @@ -1076,6 +1088,7 @@ "type": "boolean" }, "last_refresh": { + "format": "date-time", "type": "string" }, "modifiers": { @@ -1083,6 +1096,7 @@ "description": "Modifiers used when performing the query" }, "next_allowed_client_refresh": { + "format": "date-time", "type": "string" }, "query_status": { @@ -1134,6 +1148,7 @@ "type": "boolean" }, "last_refresh": { + "format": "date-time", "type": "string" }, "modifiers": { @@ -1141,6 +1156,7 @@ "description": "Modifiers used when performing the query" }, "next_allowed_client_refresh": { + "format": "date-time", "type": "string" }, "query_status": { @@ -1192,6 +1208,7 @@ "type": "boolean" }, "last_refresh": { + "format": "date-time", "type": "string" }, "modifiers": { @@ -1199,6 +1216,7 @@ "description": "Modifiers used when performing the query" }, "next_allowed_client_refresh": { + "format": "date-time", "type": "string" }, "query_status": { @@ -1253,6 +1271,7 @@ "type": "boolean" }, "last_refresh": { + "format": "date-time", "type": "string" }, "modifiers": { @@ -1260,6 +1279,7 @@ "description": "Modifiers used when performing the query" }, "next_allowed_client_refresh": { + "format": "date-time", "type": "string" }, "query_status": { @@ -1311,6 +1331,7 @@ "type": "boolean" }, "last_refresh": { + "format": "date-time", "type": "string" }, "modifiers": { @@ -1318,6 +1339,7 @@ "description": "Modifiers used when performing the query" }, "next_allowed_client_refresh": { + "format": "date-time", "type": "string" }, "query_status": { @@ -1369,6 +1391,7 @@ "type": "boolean" }, "last_refresh": { + "format": "date-time", "type": "string" }, "modifiers": { @@ -1376,6 +1399,7 @@ "description": "Modifiers used when performing the query" }, "next_allowed_client_refresh": { + "format": "date-time", "type": "string" }, "query_status": { @@ -1433,6 +1457,7 @@ "type": "boolean" }, "last_refresh": { + "format": "date-time", "type": "string" }, "modifiers": { @@ -1440,6 +1465,7 @@ "description": "Modifiers used when performing the query" }, "next_allowed_client_refresh": { + "format": "date-time", "type": "string" }, "query_status": { @@ -1501,6 +1527,7 @@ "type": "boolean" }, "last_refresh": { + "format": "date-time", "type": "string" }, "limit": { @@ -1511,6 +1538,7 @@ "description": "Modifiers used when performing the query" }, "next_allowed_client_refresh": { + "format": "date-time", "type": "string" }, "offset": { @@ -1574,6 +1602,7 @@ "type": "boolean" }, "last_refresh": { + "format": "date-time", "type": "string" }, "modifiers": { @@ -1581,6 +1610,7 @@ "description": "Modifiers used when performing the query" }, "next_allowed_client_refresh": { + "format": "date-time", "type": "string" }, "query_status": { @@ -3927,6 +3957,33 @@ "required": ["results"], "type": "object" }, + "GenericCachedQueryResponse": { + "properties": { + "cache_key": { + "type": "string" + }, + "is_cached": { + "type": "boolean" + }, + "last_refresh": { + "format": "date-time", + "type": "string" + }, + "next_allowed_client_refresh": { + "format": "date-time", + "type": "string" + }, + "query_status": { + 
"$ref": "#/definitions/QueryStatus", + "description": "Query status indicates whether next to the provided data, a query is still running." + }, + "timezone": { + "type": "string" + } + }, + "required": ["cache_key", "is_cached", "last_refresh", "next_allowed_client_refresh", "timezone"], + "type": "object" + }, "GoalLine": { "additionalProperties": false, "properties": { @@ -7027,6 +7084,10 @@ { "const": "force_cache", "type": "string" + }, + { + "const": "lazy_async", + "type": "string" } ] }, @@ -7668,6 +7729,7 @@ "type": "boolean" }, "last_refresh": { + "format": "date-time", "type": "string" }, "modifiers": { @@ -7675,6 +7737,7 @@ "description": "Modifiers used when performing the query" }, "next_allowed_client_refresh": { + "format": "date-time", "type": "string" }, "query_status": { diff --git a/frontend/src/queries/schema.ts b/frontend/src/queries/schema.ts index e9ddad131be2b..fbc3e14da00a4 100644 --- a/frontend/src/queries/schema.ts +++ b/frontend/src/queries/schema.ts @@ -927,7 +927,14 @@ export type LifecycleFilter = { showLegend?: LifecycleFilterLegacy['show_legend'] } -export type RefreshType = boolean | 'async' | 'blocking' | 'force_async' | 'force_blocking' | 'force_cache' +export type RefreshType = + | boolean + | 'async' + | 'blocking' + | 'force_async' + | 'force_blocking' + | 'force_cache' + | 'lazy_async' export interface QueryRequest { /** Client provided query ID. Can be used to retrieve the status or cancel the query. */ @@ -977,7 +984,9 @@ export interface AnalyticsQueryResponseBase { interface CachedQueryResponseMixin { is_cached: boolean + /** @format date-time */ last_refresh: string + /** @format date-time */ next_allowed_client_refresh: string cache_key: string timezone: string @@ -987,6 +996,8 @@ interface CachedQueryResponseMixin { type CachedQueryResponse = T & CachedQueryResponseMixin +export type GenericCachedQueryResponse = CachedQueryResponse> + export interface QueryStatusResponse { query_status: QueryStatus } diff --git a/posthog/api/insight.py b/posthog/api/insight.py index 8b3dded51e26e..116841d004e3f 100644 --- a/posthog/api/insight.py +++ b/posthog/api/insight.py @@ -1,3 +1,4 @@ +import posthoganalytics import json from functools import lru_cache from typing import Any, Optional, Union, cast @@ -63,9 +64,9 @@ from posthog.hogql_queries.legacy_compatibility.flagged_conversion_manager import ( conversion_to_query_based, ) -from posthog.hogql_queries.query_runner import execution_mode_from_refresh +from posthog.hogql_queries.query_runner import execution_mode_from_refresh, ExecutionMode from posthog.kafka_client.topics import KAFKA_METRICS_TIME_TO_SEE_DATA -from posthog.models import DashboardTile, Filter, Insight, User +from posthog.models import DashboardTile, Filter, Insight, User, Team from posthog.models.activity_logging.activity_log import ( Change, Detail, @@ -544,6 +545,27 @@ def to_representation(self, instance: Insight): return representation + def is_async_shared_dashboard(self, team: Team) -> bool: + flag_enabled = posthoganalytics.feature_enabled( + "hogql-dashboard-async", + str(team.uuid), + groups={ + "organization": str(team.organization_id), + "project": str(team.id), + }, + group_properties={ + "organization": { + "id": str(team.organization_id), + }, + "project": { + "id": str(team.id), + }, + }, + only_evaluate_locally=True, + send_feature_flag_events=False, + ) + return flag_enabled and self.context.get("is_shared", False) + @lru_cache(maxsize=1) def insight_result(self, insight: Insight) -> InsightResult: from 
posthog.caching.calculate_results import calculate_for_query_based_insight @@ -555,6 +577,12 @@ def insight_result(self, insight: Insight) -> InsightResult: refresh_requested = refresh_requested_by_client(self.context["request"]) execution_mode = execution_mode_from_refresh(refresh_requested) + if ( + self.is_async_shared_dashboard(insight.team) + and execution_mode == ExecutionMode.CACHE_ONLY_NEVER_CALCULATE + ): + execution_mode = ExecutionMode.EXTENDED_CACHE_CALCULATE_ASYNC_IF_STALE + return calculate_for_query_based_insight( insight, dashboard=dashboard, diff --git a/posthog/api/test/dashboards/test_dashboard.py b/posthog/api/test/dashboards/test_dashboard.py index 8da7b7e9a8cd6..e7be2c72e45ec 100644 --- a/posthog/api/test/dashboards/test_dashboard.py +++ b/posthog/api/test/dashboards/test_dashboard.py @@ -1,7 +1,7 @@ from unittest import mock from unittest.mock import ANY, MagicMock, patch -from dateutil import parser +from dateutil.parser import isoparse from django.test import override_settings from django.utils import timezone from django.utils.timezone import now @@ -386,11 +386,11 @@ def test_refresh_cache(self): item_trends.refresh_from_db() self.assertEqual( - parser.isoparse(response_data["tiles"][0]["last_refresh"]), + isoparse(response_data["tiles"][0]["last_refresh"]), item_default.caching_state.last_refresh, ) self.assertEqual( - parser.isoparse(response_data["tiles"][1]["last_refresh"]), + isoparse(response_data["tiles"][1]["last_refresh"]), item_default.caching_state.last_refresh, ) diff --git a/posthog/caching/calculate_results.py b/posthog/caching/calculate_results.py index fe484222f532e..ee21f46cc6a3d 100644 --- a/posthog/caching/calculate_results.py +++ b/posthog/caching/calculate_results.py @@ -1,3 +1,4 @@ +from datetime import datetime from typing import TYPE_CHECKING, Any, Optional, Union from pydantic import BaseModel @@ -149,7 +150,7 @@ def calculate_for_query_based_insight( cache_key = response.get("cache_key") last_refresh = response.get("last_refresh") - if isinstance(cache_key, str) and isinstance(last_refresh, str): + if isinstance(cache_key, str) and isinstance(last_refresh, datetime): update_cached_state( # Updating the relevant InsightCachingState insight.team_id, cache_key, diff --git a/posthog/hogql_queries/actors_query_runner.py b/posthog/hogql_queries/actors_query_runner.py index 4482e3cd306a9..422fa5a82d75f 100644 --- a/posthog/hogql_queries/actors_query_runner.py +++ b/posthog/hogql_queries/actors_query_runner.py @@ -1,5 +1,4 @@ import itertools -from datetime import timedelta from typing import Optional from collections.abc import Sequence, Iterator from posthog.hogql import ast @@ -254,9 +253,6 @@ def apply_dashboard_filters(self, dashboard_filter: DashboardFilter): if self.source_query_runner: self.source_query_runner.apply_dashboard_filters(dashboard_filter) - def _refresh_frequency(self): - return timedelta(minutes=1) - def _remove_aliases(self, node: ast.Expr) -> ast.Expr: if isinstance(node, ast.Alias): return self._remove_aliases(node.expr) diff --git a/posthog/hogql_queries/events_query_runner.py b/posthog/hogql_queries/events_query_runner.py index 28e26629a2f00..2a3e68f192ad6 100644 --- a/posthog/hogql_queries/events_query_runner.py +++ b/posthog/hogql_queries/events_query_runner.py @@ -265,6 +265,3 @@ def apply_dashboard_filters(self, dashboard_filter: DashboardFilter): def select_input_raw(self) -> list[str]: return ["*"] if len(self.query.select) == 0 else self.query.select - - def _refresh_frequency(self): - return 
timedelta(minutes=1) diff --git a/posthog/hogql_queries/hogql_query_runner.py b/posthog/hogql_queries/hogql_query_runner.py index ade68d81f3fe3..ddf98d500ff1d 100644 --- a/posthog/hogql_queries/hogql_query_runner.py +++ b/posthog/hogql_queries/hogql_query_runner.py @@ -1,4 +1,3 @@ -from datetime import timedelta from typing import Optional, cast from collections.abc import Callable @@ -66,9 +65,6 @@ def calculate(self) -> HogQLQueryResponse: response = response.model_copy(update={**paginator.response_params(), "results": paginator.results}) return response - def _refresh_frequency(self): - return timedelta(minutes=1) - def apply_dashboard_filters(self, dashboard_filter: DashboardFilter): self.query.filters = self.query.filters or HogQLFilters() diff --git a/posthog/hogql_queries/insights/funnels/funnel_correlation_query_runner.py b/posthog/hogql_queries/insights/funnels/funnel_correlation_query_runner.py index 33026970c1e2e..e37ee95ce6b48 100644 --- a/posthog/hogql_queries/insights/funnels/funnel_correlation_query_runner.py +++ b/posthog/hogql_queries/insights/funnels/funnel_correlation_query_runner.py @@ -1,5 +1,4 @@ import dataclasses -from datetime import timedelta from typing import Literal, Optional, Any, TypedDict, cast from posthog.constants import AUTOCAPTURE_EVENT @@ -139,9 +138,6 @@ def __init__( ) # for typings self._funnel_actors_generator = funnel_order_actor_class - def _refresh_frequency(self): - return timedelta(minutes=1) - def calculate(self) -> FunnelCorrelationResponse: """ Funnel Correlation queries take as input the same as the funnel query, diff --git a/posthog/hogql_queries/insights/insight_actors_query_options_runner.py b/posthog/hogql_queries/insights/insight_actors_query_options_runner.py index 570b95264b1d3..81cf2895d8c43 100644 --- a/posthog/hogql_queries/insights/insight_actors_query_options_runner.py +++ b/posthog/hogql_queries/insights/insight_actors_query_options_runner.py @@ -1,4 +1,3 @@ -from datetime import timedelta from typing import cast from posthog.hogql import ast @@ -34,6 +33,3 @@ def calculate(self) -> InsightActorsQueryOptionsResponse: return lifecycle_runner.to_actors_query_options() return InsightActorsQueryOptionsResponse(day=None, status=None, interval=None, breakdown=None, series=None) - - def _refresh_frequency(self): - return timedelta(minutes=1) diff --git a/posthog/hogql_queries/insights/insight_actors_query_runner.py b/posthog/hogql_queries/insights/insight_actors_query_runner.py index 6cd075a57504d..8b3e27df514a5 100644 --- a/posthog/hogql_queries/insights/insight_actors_query_runner.py +++ b/posthog/hogql_queries/insights/insight_actors_query_runner.py @@ -1,4 +1,3 @@ -from datetime import timedelta from typing import cast, Optional from posthog.hogql import ast @@ -116,6 +115,3 @@ def calculate(self) -> HogQLQueryResponse: modifiers=self.modifiers, limit_context=self.limit_context, ) - - def _refresh_frequency(self): - return timedelta(minutes=1) diff --git a/posthog/hogql_queries/query_runner.py b/posthog/hogql_queries/query_runner.py index 8ea3e2e806e79..bd63da7604dfb 100644 --- a/posthog/hogql_queries/query_runner.py +++ b/posthog/hogql_queries/query_runner.py @@ -1,5 +1,5 @@ from abc import ABC, abstractmethod -from datetime import datetime +from datetime import datetime, timedelta, timezone from enum import IntEnum from typing import Any, Generic, Optional, TypeVar, Union, cast, TypeGuard from zoneinfo import ZoneInfo @@ -52,6 +52,7 @@ WebStatsTableQuery, WebTopClicksQuery, QueryStatusResponse, + GenericCachedQueryResponse, ) 
 from posthog.schema_helpers import to_dict, to_json
 from posthog.utils import generate_cache_key, get_from_dict_or_attr, get_safe_cache
@@ -70,16 +71,20 @@
     labelnames=[LABEL_TEAM_ID, "cache_hit"],
 )
 
+EXTENDED_CACHE_AGE = timedelta(days=1)
+
 
 class ExecutionMode(IntEnum):  # Keep integer values the same for Celery's sake
-    CALCULATE_BLOCKING_ALWAYS = 4
+    CALCULATE_BLOCKING_ALWAYS = 5
     """Always recalculate."""
-    CALCULATE_ASYNC_ALWAYS = 3
+    CALCULATE_ASYNC_ALWAYS = 4
     """Always kick off async calculation."""
-    RECENT_CACHE_CALCULATE_BLOCKING_IF_STALE = 2
+    RECENT_CACHE_CALCULATE_BLOCKING_IF_STALE = 3
     """Use cache, unless the results are missing or stale."""
-    RECENT_CACHE_CALCULATE_ASYNC_IF_STALE = 1
+    RECENT_CACHE_CALCULATE_ASYNC_IF_STALE = 2
     """Use cache, kick off async calculation when results are missing or stale."""
+    EXTENDED_CACHE_CALCULATE_ASYNC_IF_STALE = 1
+    """Use cache for longer, kick off async calculation when results are missing or stale."""
     CACHE_ONLY_NEVER_CALCULATE = 0
     """Do not initiate calculation."""
 
@@ -88,6 +93,7 @@ def execution_mode_from_refresh(refresh_requested: bool | str | None) -> Executi
     refresh_map = {
         "blocking": ExecutionMode.RECENT_CACHE_CALCULATE_BLOCKING_IF_STALE,
         "async": ExecutionMode.RECENT_CACHE_CALCULATE_ASYNC_IF_STALE,
+        "lazy_async": ExecutionMode.EXTENDED_CACHE_CALCULATE_ASYNC_IF_STALE,
         "force_async": ExecutionMode.CALCULATE_ASYNC_ALWAYS,
         "force_blocking": ExecutionMode.CALCULATE_BLOCKING_ALWAYS,
         "force_cache": ExecutionMode.CACHE_ONLY_NEVER_CALCULATE,
@@ -347,7 +353,7 @@ def get_query_runner_or_none(
 R = TypeVar("R", bound=BaseModel)
 # CR (for CachedResponse) must be R extended with CachedQueryResponseMixin
 # Unfortunately inheritance is also not a thing here, because we lose this info in the schema.ts->.json->.py journey
-CR = TypeVar("CR", bound=BaseModel)
+CR = TypeVar("CR", bound=GenericCachedQueryResponse)
 
 
 class QueryRunner(ABC, Generic[Q, R, CR]):
@@ -460,13 +466,23 @@ def handle_cache_and_async_logic(
                 query_status_response = self.enqueue_async_calculation(cache_key=cache_key, user=user)
                 cached_response.query_status = query_status_response.query_status
                 return cached_response
+            elif execution_mode == ExecutionMode.EXTENDED_CACHE_CALCULATE_ASYNC_IF_STALE:
+                # We're allowed to calculate if the cache is older than 24 hours, but we'll do it asynchronously
+                assert isinstance(cached_response, CachedResponse)
+                if datetime.now(timezone.utc) - cached_response.last_refresh > EXTENDED_CACHE_AGE:
+                    query_status_response = self.enqueue_async_calculation(cache_key=cache_key, user=user)
+                    cached_response.query_status = query_status_response.query_status
+                return cached_response
         else:
             QUERY_CACHE_HIT_COUNTER.labels(team_id=self.team.pk, cache_hit="miss").inc()
             # We have no cached result. If we aren't allowed to calculate, let's return the cache miss
             # – otherwise let's proceed to calculation
             if execution_mode == ExecutionMode.CACHE_ONLY_NEVER_CALCULATE:
                 return cached_response
-            elif execution_mode == ExecutionMode.RECENT_CACHE_CALCULATE_ASYNC_IF_STALE:
+            elif execution_mode in (
+                ExecutionMode.RECENT_CACHE_CALCULATE_ASYNC_IF_STALE,
+                ExecutionMode.EXTENDED_CACHE_CALCULATE_ASYNC_IF_STALE,
+            ):
                 # We're allowed to calculate, but we'll do it asynchronously
                 query_status_response = self.enqueue_async_calculation(cache_key=cache_key, user=user)
                 cached_response.query_status = query_status_response.query_status
@@ -495,24 +511,24 @@ def run(
             if results is not None:
                 return results
 
-        fresh_response_dict = self.calculate().model_dump()
-        fresh_response_dict["is_cached"] = False
-        fresh_response_dict["last_refresh"] = datetime.now().strftime("%Y-%m-%dT%H:%M:%SZ")
-        fresh_response_dict["next_allowed_client_refresh"] = (datetime.now() + self._refresh_frequency()).strftime(
-            "%Y-%m-%dT%H:%M:%SZ"
-        )
-        fresh_response_dict["cache_key"] = cache_key
-        fresh_response_dict["timezone"] = self.team.timezone
+        fresh_response_dict = {
+            **self.calculate().model_dump(),
+            "is_cached": False,
+            "last_refresh": datetime.now(timezone.utc),
+            "next_allowed_client_refresh": datetime.now(timezone.utc) + self._refresh_frequency(),
+            "cache_key": cache_key,
+            "timezone": self.team.timezone,
+        }
         fresh_response = CachedResponse(**fresh_response_dict)
 
-        # Dont cache debug queries with errors and export queries
+        # Don't cache debug queries with errors and export queries
         has_error: Optional[list] = fresh_response_dict.get("error", None)
-        if (has_error is None or len(has_error) == 0) and self.limit_context != LimitContext.EXPORT:
-            # TODO: Use JSON serializer in general for redis cache
+        cache_ttl = self.cache_ttl()
+        if (has_error is None or len(has_error) == 0) and self.limit_context != LimitContext.EXPORT and cache_ttl > 0:
             fresh_response_serialized = OrjsonJsonSerializer({}).dumps(fresh_response.model_dump())
-            cache.set(cache_key, fresh_response_serialized, settings.CACHED_RESULTS_TTL)
+            cache.set(cache_key, fresh_response_serialized, cache_ttl)
+            QUERY_CACHE_WRITE_COUNTER.labels(team_id=self.team.pk).inc()
 
-        QUERY_CACHE_WRITE_COUNTER.labels(team_id=self.team.pk).inc()
         return fresh_response
 
     @abstractmethod
@@ -554,9 +570,11 @@ def _is_stale(self, cached_result_package):
         # Default is to have the result valid for at 1 minute
         return is_stale(self.team, datetime.now(tz=ZoneInfo("UTC")), "minute", cached_result_package)
 
-    @abstractmethod
-    def _refresh_frequency(self):
-        raise NotImplementedError()
+    def _refresh_frequency(self) -> timedelta:
+        return timedelta(minutes=1)
+
+    def cache_ttl(self) -> float:
+        return settings.CACHED_RESULTS_TTL
 
     def apply_dashboard_filters(self, dashboard_filter: DashboardFilter):
         """Irreversably update self.query with provided dashboard filters."""
diff --git a/posthog/hogql_queries/sessions_timeline_query_runner.py b/posthog/hogql_queries/sessions_timeline_query_runner.py
index e9a49ecbe981b..04c5542c8b3e7 100644
--- a/posthog/hogql_queries/sessions_timeline_query_runner.py
+++ b/posthog/hogql_queries/sessions_timeline_query_runner.py
@@ -1,4 +1,3 @@
-from datetime import timedelta
 import json
 from typing import cast
 from posthog.api.element import ElementSerializer
@@ -181,6 +180,3 @@ def calculate(self) -> SessionsTimelineQueryResponse:
             timings=self.timings.to_list(),
             hogql=query_result.hogql,
         )
-
-    def _refresh_frequency(self):
-        return timedelta(minutes=1)  # TODO: Make sure
this is cached diff --git a/posthog/hogql_queries/test/test_query_runner.py b/posthog/hogql_queries/test/test_query_runner.py index 96003badffd03..a20d2930809d4 100644 --- a/posthog/hogql_queries/test/test_query_runner.py +++ b/posthog/hogql_queries/test/test_query_runner.py @@ -3,7 +3,6 @@ from unittest import mock from zoneinfo import ZoneInfo -from dateutil.parser import isoparse from freezegun import freeze_time from pydantic import BaseModel @@ -49,9 +48,7 @@ def _refresh_frequency(self) -> timedelta: return timedelta(minutes=4) def _is_stale(self, cached_result_package) -> bool: - return isoparse(cached_result_package.last_refresh) + timedelta(minutes=10) <= datetime.now( - tz=ZoneInfo("UTC") - ) + return cached_result_package.last_refresh + timedelta(minutes=10) <= datetime.now(tz=ZoneInfo("UTC")) TestQueryRunner.__abstractmethods__ = frozenset() @@ -153,8 +150,8 @@ def test_cache_response(self, mock_on_commit): response = runner.run(execution_mode=ExecutionMode.RECENT_CACHE_CALCULATE_BLOCKING_IF_STALE) self.assertIsInstance(response, TestCachedBasicQueryResponse) self.assertEqual(response.is_cached, False) - self.assertEqual(response.last_refresh, "2023-02-04T13:37:42Z") - self.assertEqual(response.next_allowed_client_refresh, "2023-02-04T13:41:42Z") + self.assertEqual(response.last_refresh.isoformat(), "2023-02-04T13:37:42+00:00") + self.assertEqual(response.next_allowed_client_refresh.isoformat(), "2023-02-04T13:41:42+00:00") # returns cached response afterwards response = runner.run(execution_mode=ExecutionMode.RECENT_CACHE_CALCULATE_BLOCKING_IF_STALE) @@ -186,6 +183,21 @@ def test_cache_response(self, mock_on_commit): self.assertEqual(response.is_cached, True) mock_on_commit.assert_called_once() # still once + with freeze_time(datetime(2023, 2, 4, 23, 55, 42)): + # returns cached response for extended time + response = runner.run(execution_mode=ExecutionMode.EXTENDED_CACHE_CALCULATE_ASYNC_IF_STALE) + self.assertIsInstance(response, TestCachedBasicQueryResponse) + self.assertEqual(response.is_cached, True) + mock_on_commit.assert_called_once() # still once + + mock_on_commit.reset_mock() + with freeze_time(datetime(2023, 2, 5, 23, 55, 42)): + # returns cached response for extended time but finally kicks off calculation in the background + response = runner.run(execution_mode=ExecutionMode.EXTENDED_CACHE_CALCULATE_ASYNC_IF_STALE) + self.assertIsInstance(response, TestCachedBasicQueryResponse) + self.assertEqual(response.is_cached, True) + mock_on_commit.assert_called_once() + def test_modifier_passthrough(self): try: from ee.clickhouse.materialized_columns.analyze import materialize diff --git a/posthog/schema.py b/posthog/schema.py index 91a96d1c96edd..01919ec7ed8a8 100644 --- a/posthog/schema.py +++ b/posthog/schema.py @@ -1016,11 +1016,11 @@ class TestCachedBasicQueryResponse(BaseModel): ) hogql: Optional[str] = Field(default=None, description="Generated HogQL query.") is_cached: bool - last_refresh: str + last_refresh: AwareDatetime modifiers: Optional[HogQLQueryModifiers] = Field( default=None, description="Modifiers used when performing the query" ) - next_allowed_client_refresh: str + next_allowed_client_refresh: AwareDatetime query_status: Optional[QueryStatus] = Field( default=None, description="Query status indicates whether next to the provided data, a query is still running." 
) @@ -1334,13 +1334,13 @@ class CachedActorsQueryResponse(BaseModel): hasMore: Optional[bool] = None hogql: str = Field(..., description="Generated HogQL query.") is_cached: bool - last_refresh: str + last_refresh: AwareDatetime limit: int missing_actors_count: Optional[int] = None modifiers: Optional[HogQLQueryModifiers] = Field( default=None, description="Modifiers used when performing the query" ) - next_allowed_client_refresh: str + next_allowed_client_refresh: AwareDatetime offset: int query_status: Optional[QueryStatus] = Field( default=None, description="Query status indicates whether next to the provided data, a query is still running." @@ -1366,12 +1366,12 @@ class CachedEventsQueryResponse(BaseModel): hasMore: Optional[bool] = None hogql: str = Field(..., description="Generated HogQL query.") is_cached: bool - last_refresh: str + last_refresh: AwareDatetime limit: Optional[int] = None modifiers: Optional[HogQLQueryModifiers] = Field( default=None, description="Modifiers used when performing the query" ) - next_allowed_client_refresh: str + next_allowed_client_refresh: AwareDatetime offset: Optional[int] = None query_status: Optional[QueryStatus] = Field( default=None, description="Query status indicates whether next to the provided data, a query is still running." @@ -1397,12 +1397,12 @@ class CachedFunnelCorrelationResponse(BaseModel): hasMore: Optional[bool] = None hogql: Optional[str] = Field(default=None, description="Generated HogQL query.") is_cached: bool - last_refresh: str + last_refresh: AwareDatetime limit: Optional[int] = None modifiers: Optional[HogQLQueryModifiers] = Field( default=None, description="Modifiers used when performing the query" ) - next_allowed_client_refresh: str + next_allowed_client_refresh: AwareDatetime offset: Optional[int] = None query_status: Optional[QueryStatus] = Field( default=None, description="Query status indicates whether next to the provided data, a query is still running." @@ -1426,11 +1426,11 @@ class CachedFunnelsQueryResponse(BaseModel): ) hogql: Optional[str] = Field(default=None, description="Generated HogQL query.") is_cached: bool - last_refresh: str + last_refresh: AwareDatetime modifiers: Optional[HogQLQueryModifiers] = Field( default=None, description="Modifiers used when performing the query" ) - next_allowed_client_refresh: str + next_allowed_client_refresh: AwareDatetime query_status: Optional[QueryStatus] = Field( default=None, description="Query status indicates whether next to the provided data, a query is still running." ) @@ -1451,8 +1451,8 @@ class CachedInsightActorsQueryOptionsResponse(BaseModel): day: Optional[list[DayItem]] = None interval: Optional[list[IntervalItem]] = None is_cached: bool - last_refresh: str - next_allowed_client_refresh: str + last_refresh: AwareDatetime + next_allowed_client_refresh: AwareDatetime query_status: Optional[QueryStatus] = Field( default=None, description="Query status indicates whether next to the provided data, a query is still running." 
) @@ -1472,11 +1472,11 @@ class CachedLifecycleQueryResponse(BaseModel): ) hogql: Optional[str] = Field(default=None, description="Generated HogQL query.") is_cached: bool - last_refresh: str + last_refresh: AwareDatetime modifiers: Optional[HogQLQueryModifiers] = Field( default=None, description="Modifiers used when performing the query" ) - next_allowed_client_refresh: str + next_allowed_client_refresh: AwareDatetime query_status: Optional[QueryStatus] = Field( default=None, description="Query status indicates whether next to the provided data, a query is still running." ) @@ -1498,11 +1498,11 @@ class CachedPathsQueryResponse(BaseModel): ) hogql: Optional[str] = Field(default=None, description="Generated HogQL query.") is_cached: bool - last_refresh: str + last_refresh: AwareDatetime modifiers: Optional[HogQLQueryModifiers] = Field( default=None, description="Modifiers used when performing the query" ) - next_allowed_client_refresh: str + next_allowed_client_refresh: AwareDatetime query_status: Optional[QueryStatus] = Field( default=None, description="Query status indicates whether next to the provided data, a query is still running." ) @@ -1525,11 +1525,11 @@ class CachedSessionsTimelineQueryResponse(BaseModel): hasMore: Optional[bool] = None hogql: Optional[str] = Field(default=None, description="Generated HogQL query.") is_cached: bool - last_refresh: str + last_refresh: AwareDatetime modifiers: Optional[HogQLQueryModifiers] = Field( default=None, description="Modifiers used when performing the query" ) - next_allowed_client_refresh: str + next_allowed_client_refresh: AwareDatetime query_status: Optional[QueryStatus] = Field( default=None, description="Query status indicates whether next to the provided data, a query is still running." ) @@ -1551,11 +1551,11 @@ class CachedStickinessQueryResponse(BaseModel): ) hogql: Optional[str] = Field(default=None, description="Generated HogQL query.") is_cached: bool - last_refresh: str + last_refresh: AwareDatetime modifiers: Optional[HogQLQueryModifiers] = Field( default=None, description="Modifiers used when performing the query" ) - next_allowed_client_refresh: str + next_allowed_client_refresh: AwareDatetime query_status: Optional[QueryStatus] = Field( default=None, description="Query status indicates whether next to the provided data, a query is still running." ) @@ -1577,11 +1577,11 @@ class CachedTrendsQueryResponse(BaseModel): ) hogql: Optional[str] = Field(default=None, description="Generated HogQL query.") is_cached: bool - last_refresh: str + last_refresh: AwareDatetime modifiers: Optional[HogQLQueryModifiers] = Field( default=None, description="Modifiers used when performing the query" ) - next_allowed_client_refresh: str + next_allowed_client_refresh: AwareDatetime query_status: Optional[QueryStatus] = Field( default=None, description="Query status indicates whether next to the provided data, a query is still running." ) @@ -1605,11 +1605,11 @@ class CachedWebOverviewQueryResponse(BaseModel): ) hogql: Optional[str] = Field(default=None, description="Generated HogQL query.") is_cached: bool - last_refresh: str + last_refresh: AwareDatetime modifiers: Optional[HogQLQueryModifiers] = Field( default=None, description="Modifiers used when performing the query" ) - next_allowed_client_refresh: str + next_allowed_client_refresh: AwareDatetime query_status: Optional[QueryStatus] = Field( default=None, description="Query status indicates whether next to the provided data, a query is still running." 
) @@ -1634,12 +1634,12 @@ class CachedWebStatsTableQueryResponse(BaseModel): hasMore: Optional[bool] = None hogql: Optional[str] = Field(default=None, description="Generated HogQL query.") is_cached: bool - last_refresh: str + last_refresh: AwareDatetime limit: Optional[int] = None modifiers: Optional[HogQLQueryModifiers] = Field( default=None, description="Modifiers used when performing the query" ) - next_allowed_client_refresh: str + next_allowed_client_refresh: AwareDatetime offset: Optional[int] = None query_status: Optional[QueryStatus] = Field( default=None, description="Query status indicates whether next to the provided data, a query is still running." @@ -1665,11 +1665,11 @@ class CachedWebTopClicksQueryResponse(BaseModel): ) hogql: Optional[str] = Field(default=None, description="Generated HogQL query.") is_cached: bool - last_refresh: str + last_refresh: AwareDatetime modifiers: Optional[HogQLQueryModifiers] = Field( default=None, description="Modifiers used when performing the query" ) - next_allowed_client_refresh: str + next_allowed_client_refresh: AwareDatetime query_status: Optional[QueryStatus] = Field( default=None, description="Query status indicates whether next to the provided data, a query is still running." ) @@ -1976,6 +1976,17 @@ class FunnelsQueryResponse(BaseModel): ) +class GenericCachedQueryResponse(BaseModel): + cache_key: str + is_cached: bool + last_refresh: AwareDatetime + next_allowed_client_refresh: AwareDatetime + query_status: Optional[QueryStatus] = Field( + default=None, description="Query status indicates whether next to the provided data, a query is still running." + ) + timezone: str + + class GroupPropertyFilter(BaseModel): model_config = ConfigDict( extra="forbid", @@ -2747,13 +2758,13 @@ class CachedHogQLQueryResponse(BaseModel): hasMore: Optional[bool] = None hogql: Optional[str] = Field(default=None, description="Generated HogQL query.") is_cached: bool - last_refresh: str + last_refresh: AwareDatetime limit: Optional[int] = None metadata: Optional[HogQLMetadataResponse] = Field(default=None, description="Query metadata output") modifiers: Optional[HogQLQueryModifiers] = Field( default=None, description="Modifiers used when performing the query" ) - next_allowed_client_refresh: str + next_allowed_client_refresh: AwareDatetime offset: Optional[int] = None query: Optional[str] = Field(default=None, description="Input query string") query_status: Optional[QueryStatus] = Field( @@ -2778,11 +2789,11 @@ class CachedRetentionQueryResponse(BaseModel): ) hogql: Optional[str] = Field(default=None, description="Generated HogQL query.") is_cached: bool - last_refresh: str + last_refresh: AwareDatetime modifiers: Optional[HogQLQueryModifiers] = Field( default=None, description="Modifiers used when performing the query" ) - next_allowed_client_refresh: str + next_allowed_client_refresh: AwareDatetime query_status: Optional[QueryStatus] = Field( default=None, description="Query status indicates whether next to the provided data, a query is still running." 
) diff --git a/posthog/session_recordings/queries/session_recording_list_from_filters.py b/posthog/session_recordings/queries/session_recording_list_from_filters.py index f87afbb38191f..fc0c3781946d5 100644 --- a/posthog/session_recordings/queries/session_recording_list_from_filters.py +++ b/posthog/session_recordings/queries/session_recording_list_from_filters.py @@ -1,3 +1,4 @@ +import re from typing import Any, NamedTuple, cast, Optional from datetime import datetime, timedelta @@ -6,7 +7,7 @@ from posthog.hogql.parser import parse_select from posthog.hogql.property import entity_to_expr, property_to_expr from posthog.hogql_queries.insights.paginators import HogQLHasMorePaginator -from posthog.models import Team +from posthog.models import Team, Property from posthog.models.filters.session_recordings_filter import SessionRecordingsFilter from posthog.models.filters.mixins.utils import cached_property from posthog.models.property import PropertyGroup @@ -14,6 +15,18 @@ from posthog.session_recordings.queries.session_replay_events import ttl_days from posthog.constants import TREND_FILTER_TYPE_ACTIONS, PropertyOperatorType +import structlog + +logger = structlog.get_logger(__name__) + + +def is_event_property(p: Property) -> bool: + return p.type == "event" or (p.type == "hogql" and bool(re.search(r"(? bool: + return p.type == "person" or (p.type == "hogql" and "person.properties" in p.key) + class SessionRecordingQueryResult(NamedTuple): results: list @@ -158,8 +171,8 @@ def _where_predicates(self) -> ast.And: ) ) - if self._filter.entities: - events_sub_query = EventsSubQuery(self._team, self._filter, self.ttl_days).get_query() + events_sub_query = EventsSubQuery(self._team, self._filter, self.ttl_days).get_query() + if events_sub_query: exprs.append( ast.CompareOperation( op=ast.CompareOperationOp.In, @@ -179,9 +192,12 @@ def _where_predicates(self) -> ast.And: ) ) - non_person_properties = self._strip_person_properties(self._filter.property_groups) - if non_person_properties: - exprs.append(property_to_expr(non_person_properties, team=self._team, scope="replay")) + remaining_properties = self._strip_person_and_event_properties(self._filter.property_groups) + if remaining_properties: + logger.info( + "session_replay_query_builder has unhandled properties", unhandled_properties=remaining_properties + ) + exprs.append(property_to_expr(remaining_properties, team=self._team, scope="replay")) person_id_subquery = PersonsIdSubQuery(self._team, self._filter, self.ttl_days).get_query() if person_id_subquery: @@ -254,8 +270,10 @@ def _having_predicates(self) -> ast.And | Constant: return ast.And(exprs=exprs) if exprs else Constant(value=True) - def _strip_person_properties(self, property_group: PropertyGroup) -> PropertyGroup | None: - property_groups_to_keep = [g for g in property_group.flat if g.type != "person"] + def _strip_person_and_event_properties(self, property_group: PropertyGroup) -> PropertyGroup | None: + property_groups_to_keep = [ + g for g in property_group.flat if not is_event_property(g) and not is_person_property(g) + ] return ( PropertyGroup( @@ -294,7 +312,7 @@ def get_query(self) -> ast.SelectQuery | ast.SelectUnionQuery | None: @cached_property def person_properties(self) -> PropertyGroup | None: - person_property_groups = [g for g in self._filter.property_groups.flat if g.type == "person" in g.type] + person_property_groups = [g for g in self._filter.property_groups.flat if is_person_property(g)] return ( PropertyGroup( type=PropertyOperatorType.AND, @@ -340,7 +358,7 @@ 
def get_query(self) -> ast.SelectQuery | ast.SelectUnionQuery | None: @cached_property def person_properties(self) -> PropertyGroup | None: - person_property_groups = [g for g in self._filter.property_groups.flat if g.type == "person" in g.type] + person_property_groups = [g for g in self._filter.property_groups.flat if is_person_property(g)] return ( PropertyGroup( type=PropertyOperatorType.AND, @@ -392,14 +410,17 @@ def _event_predicates(self): return event_exprs, list(event_names) - def get_query(self): - return ast.SelectQuery( - select=[ast.Alias(alias="session_id", expr=ast.Field(chain=["$session_id"]))], - select_from=ast.JoinExpr(table=ast.Field(chain=["events"])), - where=self._where_predicates(), - having=self._having_predicates(), - group_by=[ast.Field(chain=["$session_id"])], - ) + def get_query(self) -> ast.SelectQuery | ast.SelectUnionQuery | None: + if self._filter.entities or self.event_properties: + return ast.SelectQuery( + select=[ast.Alias(alias="session_id", expr=ast.Field(chain=["$session_id"]))], + select_from=ast.JoinExpr(table=ast.Field(chain=["events"])), + where=self._where_predicates(), + having=self._having_predicates(), + group_by=[ast.Field(chain=["$session_id"])], + ) + else: + return None def _where_predicates(self) -> ast.Expr: exprs: list[ast.Expr] = [ @@ -445,6 +466,9 @@ def _where_predicates(self) -> ast.Expr: if event_where_exprs: exprs.append(ast.Or(exprs=event_where_exprs)) + if self.event_properties: + exprs.append(property_to_expr(self.event_properties, team=self._team, scope="replay")) + if self._filter.session_ids: exprs.append( ast.CompareOperation( @@ -470,3 +494,7 @@ def _having_predicates(self) -> ast.Expr: ) return ast.Constant(value=True) + + @cached_property + def event_properties(self): + return [g for g in self._filter.property_groups.flat if is_event_property(g)] diff --git a/posthog/session_recordings/queries/test/__snapshots__/test_session_recording_list_from_filters.ambr b/posthog/session_recordings/queries/test/__snapshots__/test_session_recording_list_from_filters.ambr index ad01a1b0da409..d620633d8d16f 100644 --- a/posthog/session_recordings/queries/test/__snapshots__/test_session_recording_list_from_filters.ambr +++ b/posthog/session_recordings/queries/test/__snapshots__/test_session_recording_list_from_filters.ambr @@ -1342,30 +1342,29 @@ sum(s.console_warn_count) AS console_warn_count, sum(s.console_error_count) AS console_error_count FROM session_replay_events AS s - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS s__pdi___person_id, - argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, - person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0)) AS s__pdi ON equals(s.distinct_id, s__pdi.distinct_id) - LEFT JOIN - (SELECT person.id AS id, - replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'email'), ''), 'null'), '^"|"$', '') AS properties___email - FROM person - WHERE and(equals(person.team_id, 2), ifNull(in(tuple(person.id, person.version), - (SELECT person.id AS id, max(person.version) AS version - FROM person - WHERE equals(person.team_id, 2) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(person.created_at, person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 
0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS s__pdi__person ON equals(s__pdi.s__pdi___person_id, s__pdi__person.id) WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id, (SELECT events.`$session_id` AS session_id FROM events WHERE and(equals(events.team_id, 2), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-13 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), and(equals(events.event, '$pageview'), true)) GROUP BY events.`$session_id` - HAVING hasAll(groupUniqArray(events.event), ['$pageview']))), ifNull(equals(s__pdi__person.properties___email, 'bla'), 0)) + HAVING hasAll(groupUniqArray(events.event), ['$pageview']))), in(s.distinct_id, + (SELECT person_distinct_ids.distinct_id AS distinct_id + FROM + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_distinct_ids___person_id, argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0)) AS person_distinct_ids + LEFT JOIN + (SELECT person.id AS id, replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'email'), ''), 'null'), '^"|"$', '') AS properties___email + FROM person + WHERE and(equals(person.team_id, 2), ifNull(in(tuple(person.id, person.version), + (SELECT person.id AS id, max(person.version) AS version + FROM person + WHERE equals(person.team_id, 2) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(person.created_at, person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS person_distinct_ids__person ON equals(person_distinct_ids.person_distinct_ids___person_id, person_distinct_ids__person.id) + WHERE ifNull(equals(person_distinct_ids__person.properties___email, 'bla'), 0)))) GROUP BY s.session_id HAVING true ORDER BY start_time DESC @@ -1397,30 +1396,29 @@ sum(s.console_warn_count) AS console_warn_count, sum(s.console_error_count) AS console_error_count FROM session_replay_events AS s - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS s__pdi___person_id, - argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, - person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0)) AS s__pdi ON equals(s.distinct_id, s__pdi.distinct_id) - LEFT JOIN - (SELECT person.id AS id, - replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 
'email'), ''), 'null'), '^"|"$', '') AS properties___email - FROM person - WHERE and(equals(person.team_id, 2), ifNull(in(tuple(person.id, person.version), - (SELECT person.id AS id, max(person.version) AS version - FROM person - WHERE equals(person.team_id, 2) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(person.created_at, person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS s__pdi__person ON equals(s__pdi.s__pdi___person_id, s__pdi__person.id) - JOIN - (SELECT replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$browser'), ''), 'null'), '^"|"$', '') AS `properties___$browser`, - events.`$session_id` AS `$session_id` - FROM events PREWHERE ifNull(greaterOrEquals(events.timestamp, minus(toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC'), toIntervalDay(90))), 0) - WHERE equals(events.team_id, 2)) AS s__events ON equals(s.session_id, s__events.`$session_id`) - WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), and(ifNull(equals(s__events.`properties___$browser`, 'Chrome'), 0), ifNull(equals(s__pdi__person.properties___email, 'bla'), 0))) + WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 2), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-13 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$browser'), ''), 'null'), '^"|"$', ''), 'Chrome'), 0)) + GROUP BY events.`$session_id` + HAVING true)), in(s.distinct_id, + (SELECT person_distinct_ids.distinct_id AS distinct_id + FROM + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_distinct_ids___person_id, argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0)) AS person_distinct_ids + LEFT JOIN + (SELECT person.id AS id, replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'email'), ''), 'null'), '^"|"$', '') AS properties___email + FROM person + WHERE and(equals(person.team_id, 2), ifNull(in(tuple(person.id, person.version), + (SELECT person.id AS id, max(person.version) AS 
version + FROM person + WHERE equals(person.team_id, 2) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(person.created_at, person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS person_distinct_ids__person ON equals(person_distinct_ids.person_distinct_ids___person_id, person_distinct_ids__person.id) + WHERE ifNull(equals(person_distinct_ids__person.properties___email, 'bla'), 0)))) GROUP BY s.session_id HAVING true ORDER BY start_time DESC @@ -1489,30 +1487,29 @@ sum(s.console_warn_count) AS console_warn_count, sum(s.console_error_count) AS console_error_count FROM session_replay_events AS s - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS s__pdi___person_id, - argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, - person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0)) AS s__pdi ON equals(s.distinct_id, s__pdi.distinct_id) - LEFT JOIN - (SELECT person.id AS id, - nullIf(nullIf(person.pmat_email, ''), 'null') AS properties___email - FROM person - WHERE and(equals(person.team_id, 2), ifNull(in(tuple(person.id, person.version), - (SELECT person.id AS id, max(person.version) AS version - FROM person - WHERE equals(person.team_id, 2) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(person.created_at, person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS s__pdi__person ON equals(s__pdi.s__pdi___person_id, s__pdi__person.id) WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id, (SELECT events.`$session_id` AS session_id FROM events WHERE and(equals(events.team_id, 2), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-13 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), and(equals(events.event, '$pageview'), true)) GROUP BY events.`$session_id` - HAVING hasAll(groupUniqArray(events.event), ['$pageview']))), ifNull(equals(s__pdi__person.properties___email, 'bla'), 0)) + HAVING hasAll(groupUniqArray(events.event), ['$pageview']))), in(s.distinct_id, + (SELECT person_distinct_ids.distinct_id AS distinct_id + FROM + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_distinct_ids___person_id, argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING 
ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0)) AS person_distinct_ids + LEFT JOIN + (SELECT person.id AS id, nullIf(nullIf(person.pmat_email, ''), 'null') AS properties___email + FROM person + WHERE and(equals(person.team_id, 2), ifNull(in(tuple(person.id, person.version), + (SELECT person.id AS id, max(person.version) AS version + FROM person + WHERE equals(person.team_id, 2) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(person.created_at, person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS person_distinct_ids__person ON equals(person_distinct_ids.person_distinct_ids___person_id, person_distinct_ids__person.id) + WHERE ifNull(equals(person_distinct_ids__person.properties___email, 'bla'), 0)))) GROUP BY s.session_id HAVING true ORDER BY start_time DESC @@ -1544,30 +1541,29 @@ sum(s.console_warn_count) AS console_warn_count, sum(s.console_error_count) AS console_error_count FROM session_replay_events AS s - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS s__pdi___person_id, - argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, - person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0)) AS s__pdi ON equals(s.distinct_id, s__pdi.distinct_id) - LEFT JOIN - (SELECT person.id AS id, - nullIf(nullIf(person.pmat_email, ''), 'null') AS properties___email - FROM person - WHERE and(equals(person.team_id, 2), ifNull(in(tuple(person.id, person.version), - (SELECT person.id AS id, max(person.version) AS version - FROM person - WHERE equals(person.team_id, 2) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(person.created_at, person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS s__pdi__person ON equals(s__pdi.s__pdi___person_id, s__pdi__person.id) - JOIN - (SELECT nullIf(nullIf(events.`mat_$browser`, ''), 'null') AS `properties___$browser`, - events.`$session_id` AS `$session_id` - FROM events PREWHERE ifNull(greaterOrEquals(events.timestamp, minus(toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC'), toIntervalDay(90))), 0) - WHERE equals(events.team_id, 2)) AS s__events ON equals(s.session_id, s__events.`$session_id`) - WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), and(ifNull(equals(s__events.`properties___$browser`, 'Chrome'), 0), ifNull(equals(s__pdi__person.properties___email, 'bla'), 0))) + WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 
'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 2), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-13 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(events.`mat_$browser`, ''), 'null'), 'Chrome'), 0)) + GROUP BY events.`$session_id` + HAVING true)), in(s.distinct_id, + (SELECT person_distinct_ids.distinct_id AS distinct_id + FROM + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_distinct_ids___person_id, argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0)) AS person_distinct_ids + LEFT JOIN + (SELECT person.id AS id, nullIf(nullIf(person.pmat_email, ''), 'null') AS properties___email + FROM person + WHERE and(equals(person.team_id, 2), ifNull(in(tuple(person.id, person.version), + (SELECT person.id AS id, max(person.version) AS version + FROM person + WHERE equals(person.team_id, 2) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(person.created_at, person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS person_distinct_ids__person ON equals(person_distinct_ids.person_distinct_ids___person_id, person_distinct_ids__person.id) + WHERE ifNull(equals(person_distinct_ids__person.properties___email, 'bla'), 0)))) GROUP BY s.session_id HAVING true ORDER BY start_time DESC @@ -2221,16 +2217,10 @@ sum(s.console_warn_count) AS console_warn_count, sum(s.console_error_count) AS console_error_count FROM session_replay_events AS s - JOIN - (SELECT replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'is_internal_user'), ''), 'null'), '^"|"$', '') AS properties___is_internal_user, - replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$browser'), ''), 'null'), '^"|"$', '') AS `properties___$browser`, - events.`$session_id` AS `$session_id` - FROM events PREWHERE ifNull(greaterOrEquals(events.timestamp, minus(toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC'), toIntervalDay(90))), 0) - WHERE equals(events.team_id, 2)) AS s__events ON equals(s.session_id, s__events.`$session_id`) WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id, (SELECT events.`$session_id` AS session_id FROM events - WHERE and(equals(events.team_id, 2), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), 
greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-13 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), and(equals(events.event, '$pageview'), true)) + WHERE and(equals(events.team_id, 2), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-13 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), and(equals(events.event, '$pageview'), true), and(ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'is_internal_user'), ''), 'null'), '^"|"$', ''), 'false'), 0), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$browser'), ''), 'null'), '^"|"$', ''), 'Chrome'), 0))) GROUP BY events.`$session_id` HAVING hasAll(groupUniqArray(events.event), ['$pageview']))), in(s.distinct_id, (SELECT person_distinct_ids.distinct_id AS distinct_id @@ -2249,7 +2239,7 @@ WHERE equals(person.team_id, 2) GROUP BY person.id HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(person.created_at, person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS person_distinct_ids__person ON equals(person_distinct_ids.person_distinct_ids___person_id, person_distinct_ids__person.id) - WHERE ifNull(notILike(person_distinct_ids__person.properties___email, '%@posthog.com%'), 1))), and(ifNull(equals(s__events.properties___is_internal_user, 'false'), 0), ifNull(equals(s__events.`properties___$browser`, 'Chrome'), 0))) + WHERE ifNull(notILike(person_distinct_ids__person.properties___email, '%@posthog.com%'), 1)))) GROUP BY s.session_id HAVING true ORDER BY start_time DESC @@ -2318,16 +2308,10 @@ sum(s.console_warn_count) AS console_warn_count, sum(s.console_error_count) AS console_error_count FROM session_replay_events AS s - JOIN - (SELECT nullIf(nullIf(events.mat_is_internal_user, ''), 'null') AS properties___is_internal_user, - replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$browser'), ''), 'null'), '^"|"$', '') AS `properties___$browser`, - events.`$session_id` AS `$session_id` - FROM events PREWHERE ifNull(greaterOrEquals(events.timestamp, minus(toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC'), toIntervalDay(90))), 0) - WHERE equals(events.team_id, 2)) AS s__events ON equals(s.session_id, s__events.`$session_id`) WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id, (SELECT events.`$session_id` AS session_id FROM events - WHERE and(equals(events.team_id, 2), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-13 23:58:00.000000', 6, 'UTC')), 
lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), and(equals(events.event, '$pageview'), true)) + WHERE and(equals(events.team_id, 2), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-13 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), and(equals(events.event, '$pageview'), true), and(ifNull(equals(nullIf(nullIf(events.mat_is_internal_user, ''), 'null'), 'false'), 0), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$browser'), ''), 'null'), '^"|"$', ''), 'Chrome'), 0))) GROUP BY events.`$session_id` HAVING hasAll(groupUniqArray(events.event), ['$pageview']))), in(s.distinct_id, (SELECT person_distinct_ids.distinct_id AS distinct_id @@ -2346,7 +2330,7 @@ WHERE equals(person.team_id, 2) GROUP BY person.id HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(person.created_at, person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS person_distinct_ids__person ON equals(person_distinct_ids.person_distinct_ids___person_id, person_distinct_ids__person.id) - WHERE ifNull(notILike(person_distinct_ids__person.properties___email, '%@posthog.com%'), 1))), and(ifNull(equals(s__events.properties___is_internal_user, 'false'), 0), ifNull(equals(s__events.`properties___$browser`, 'Chrome'), 0))) + WHERE ifNull(notILike(person_distinct_ids__person.properties___email, '%@posthog.com%'), 1)))) GROUP BY s.session_id HAVING true ORDER BY start_time DESC @@ -3413,12 +3397,12 @@ sum(s.console_warn_count) AS console_warn_count, sum(s.console_error_count) AS console_error_count FROM session_replay_events AS s - JOIN - (SELECT replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'is_internal_user'), ''), 'null'), '^"|"$', '') AS properties___is_internal_user, - events.`$session_id` AS `$session_id` - FROM events PREWHERE ifNull(greaterOrEquals(events.timestamp, minus(toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC'), toIntervalDay(90))), 0) - WHERE equals(events.team_id, 2)) AS s__events ON equals(s.session_id, s__events.`$session_id`) - WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), ifNull(equals(s__events.properties___is_internal_user, 'false'), 0)) + WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 2), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), 
toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-13 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'is_internal_user'), ''), 'null'), '^"|"$', ''), 'false'), 0)) + GROUP BY events.`$session_id` + HAVING true))) GROUP BY s.session_id HAVING true ORDER BY start_time DESC @@ -3487,12 +3471,12 @@ sum(s.console_warn_count) AS console_warn_count, sum(s.console_error_count) AS console_error_count FROM session_replay_events AS s - JOIN - (SELECT replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'is_internal_user'), ''), 'null'), '^"|"$', '') AS properties___is_internal_user, - events.`$session_id` AS `$session_id` - FROM events PREWHERE ifNull(greaterOrEquals(events.timestamp, minus(toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC'), toIntervalDay(90))), 0) - WHERE equals(events.team_id, 2)) AS s__events ON equals(s.session_id, s__events.`$session_id`) - WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), ifNull(equals(s__events.properties___is_internal_user, 'false'), 0)) + WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 2), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-13 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'is_internal_user'), ''), 'null'), '^"|"$', ''), 'false'), 0)) + GROUP BY events.`$session_id` + HAVING true))) GROUP BY s.session_id HAVING true ORDER BY start_time DESC @@ -3561,12 +3545,12 @@ sum(s.console_warn_count) AS console_warn_count, sum(s.console_error_count) AS console_error_count FROM session_replay_events AS s - JOIN - (SELECT nullIf(nullIf(events.mat_is_internal_user, ''), 'null') AS properties___is_internal_user, - events.`$session_id` AS `$session_id` - FROM events PREWHERE ifNull(greaterOrEquals(events.timestamp, minus(toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC'), toIntervalDay(90))), 0) - WHERE equals(events.team_id, 2)) AS s__events ON equals(s.session_id, s__events.`$session_id`) - WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 
0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), ifNull(equals(s__events.properties___is_internal_user, 'false'), 0)) + WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 2), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-13 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(events.mat_is_internal_user, ''), 'null'), 'false'), 0)) + GROUP BY events.`$session_id` + HAVING true))) GROUP BY s.session_id HAVING true ORDER BY start_time DESC @@ -3635,12 +3619,12 @@ sum(s.console_warn_count) AS console_warn_count, sum(s.console_error_count) AS console_error_count FROM session_replay_events AS s - JOIN - (SELECT nullIf(nullIf(events.mat_is_internal_user, ''), 'null') AS properties___is_internal_user, - events.`$session_id` AS `$session_id` - FROM events PREWHERE ifNull(greaterOrEquals(events.timestamp, minus(toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC'), toIntervalDay(90))), 0) - WHERE equals(events.team_id, 2)) AS s__events ON equals(s.session_id, s__events.`$session_id`) - WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), ifNull(equals(s__events.properties___is_internal_user, 'false'), 0)) + WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 2), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-13 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(events.mat_is_internal_user, ''), 'null'), 'false'), 0)) + GROUP BY events.`$session_id` + HAVING true))) GROUP BY 
s.session_id HAVING true ORDER BY start_time DESC @@ -3709,12 +3693,12 @@ sum(s.console_warn_count) AS console_warn_count, sum(s.console_error_count) AS console_error_count FROM session_replay_events AS s - JOIN - (SELECT replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'is_internal_user'), ''), 'null'), '^"|"$', '') AS properties___is_internal_user, - events.`$session_id` AS `$session_id` - FROM events PREWHERE ifNull(greaterOrEquals(events.timestamp, minus(toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC'), toIntervalDay(90))), 0) - WHERE equals(events.team_id, 2)) AS s__events ON equals(s.session_id, s__events.`$session_id`) - WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), ifNull(equals(s__events.properties___is_internal_user, 'true'), 0)) + WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 2), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-13 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'is_internal_user'), ''), 'null'), '^"|"$', ''), 'true'), 0)) + GROUP BY events.`$session_id` + HAVING true))) GROUP BY s.session_id HAVING true ORDER BY start_time DESC @@ -3783,12 +3767,12 @@ sum(s.console_warn_count) AS console_warn_count, sum(s.console_error_count) AS console_error_count FROM session_replay_events AS s - JOIN - (SELECT nullIf(nullIf(events.mat_is_internal_user, ''), 'null') AS properties___is_internal_user, - events.`$session_id` AS `$session_id` - FROM events PREWHERE ifNull(greaterOrEquals(events.timestamp, minus(toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC'), toIntervalDay(90))), 0) - WHERE equals(events.team_id, 2)) AS s__events ON equals(s.session_id, s__events.`$session_id`) - WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), ifNull(equals(s__events.properties___is_internal_user, 'true'), 0)) + WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), 
toDateTime64('2021-01-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 2), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-13 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(events.mat_is_internal_user, ''), 'null'), 'true'), 0)) + GROUP BY events.`$session_id` + HAVING true))) GROUP BY s.session_id HAVING true ORDER BY start_time DESC @@ -3857,25 +3841,24 @@ sum(s.console_warn_count) AS console_warn_count, sum(s.console_error_count) AS console_error_count FROM session_replay_events AS s - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS s__pdi___person_id, - argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, - person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0)) AS s__pdi ON equals(s.distinct_id, s__pdi.distinct_id) - LEFT JOIN - (SELECT person.id AS id, - replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'email'), ''), 'null'), '^"|"$', '') AS properties___email - FROM person - WHERE and(equals(person.team_id, 2), ifNull(in(tuple(person.id, person.version), - (SELECT person.id AS id, max(person.version) AS version - FROM person - WHERE equals(person.team_id, 2) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(person.created_at, person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS s__pdi__person ON equals(s__pdi.s__pdi___person_id, s__pdi__person.id) - WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), ifNull(equals(s__pdi__person.properties___email, 'bla'), 0)) + WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.distinct_id, + (SELECT person_distinct_ids.distinct_id AS distinct_id + FROM + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_distinct_ids___person_id, argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY 
person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0)) AS person_distinct_ids + LEFT JOIN + (SELECT person.id AS id, replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'email'), ''), 'null'), '^"|"$', '') AS properties___email + FROM person + WHERE and(equals(person.team_id, 2), ifNull(in(tuple(person.id, person.version), + (SELECT person.id AS id, max(person.version) AS version + FROM person + WHERE equals(person.team_id, 2) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(person.created_at, person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS person_distinct_ids__person ON equals(person_distinct_ids.person_distinct_ids___person_id, person_distinct_ids__person.id) + WHERE ifNull(equals(person_distinct_ids__person.properties___email, 'bla'), 0)))) GROUP BY s.session_id HAVING true ORDER BY start_time DESC @@ -3944,25 +3927,24 @@ sum(s.console_warn_count) AS console_warn_count, sum(s.console_error_count) AS console_error_count FROM session_replay_events AS s - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS s__pdi___person_id, - argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, - person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0)) AS s__pdi ON equals(s.distinct_id, s__pdi.distinct_id) - LEFT JOIN - (SELECT person.id AS id, - nullIf(nullIf(person.pmat_email, ''), 'null') AS properties___email - FROM person - WHERE and(equals(person.team_id, 2), ifNull(in(tuple(person.id, person.version), - (SELECT person.id AS id, max(person.version) AS version - FROM person - WHERE equals(person.team_id, 2) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(person.created_at, person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS s__pdi__person ON equals(s__pdi.s__pdi___person_id, s__pdi__person.id) - WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), ifNull(equals(s__pdi__person.properties___email, 'bla'), 0)) + WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.distinct_id, + (SELECT person_distinct_ids.distinct_id AS distinct_id + FROM + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_distinct_ids___person_id, argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, person_distinct_id2.distinct_id AS distinct_id + FROM 
person_distinct_id2
+                             WHERE equals(person_distinct_id2.team_id, 2)
+                             GROUP BY person_distinct_id2.distinct_id
+                             HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0)) AS person_distinct_ids
+                          LEFT JOIN
+                             (SELECT person.id AS id, nullIf(nullIf(person.pmat_email, ''), 'null') AS properties___email
+                              FROM person
+                              WHERE and(equals(person.team_id, 2), ifNull(in(tuple(person.id, person.version),
+                                 (SELECT person.id AS id, max(person.version) AS version
+                                  FROM person
+                                  WHERE equals(person.team_id, 2)
+                                  GROUP BY person.id
+                                  HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(person.created_at, person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS person_distinct_ids__person ON equals(person_distinct_ids.person_distinct_ids___person_id, person_distinct_ids__person.id)
+                              WHERE ifNull(equals(person_distinct_ids__person.properties___email, 'bla'), 0))))
   GROUP BY s.session_id
   HAVING true
   ORDER BY start_time DESC
diff --git a/posthog/utils.py b/posthog/utils.py
index f516abe53b24d..18f271c4a5d56 100644
--- a/posthog/utils.py
+++ b/posthog/utils.py
@@ -1023,6 +1023,7 @@ def refresh_requested_by_client(request: Request) -> bool | str:
             "force_async",
             "force_blocking",
             "force_cache",
+            "lazy_async",
         ],
     )
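The posthog/utils.py hunk above only extends the set of string refresh modes that refresh_requested_by_client recognizes, adding "lazy_async" alongside the existing force_* values. A minimal sketch of how such a helper could behave follows; it is an illustration only, not PostHog's actual implementation: the "refresh" query-parameter name, the boolean fallback, and the helper name are assumptions, while the mode strings and the signature are taken from the hunk.

    from rest_framework.request import Request

    # Mode strings shown in the hunk above; "lazy_async" is the newly added one.
    _REFRESH_MODES = ("force_async", "force_blocking", "force_cache", "lazy_async")

    def refresh_requested_by_client_sketch(request: Request) -> bool | str:
        # Hypothetical behaviour: return the mode string when a known mode is
        # passed, otherwise fall back to boolean semantics for the parameter.
        raw = str(request.query_params.get("refresh", "")).lower()
        if raw in _REFRESH_MODES:
            return raw
        return raw in ("true", "1", "yes")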