diff --git a/CHANGELOG.md b/CHANGELOG.md
index 48cfdf8..161ae4f 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -3,6 +3,7 @@
 ## Unreleased
 
 - move rows payload (RowBinary, CSV, etc.) to SQL statement and remove pseudo-positional binds, making param names explicit https://github.com/plausible/ch/pull/143
+- drop `:headers` from `%Ch.Result{}` and add `:data` instead
 
 ## 0.2.2 (2023-12-23)
 
diff --git a/lib/ch/query.ex b/lib/ch/query.ex
index 65b3ad8..2685e70 100644
--- a/lib/ch/query.ex
+++ b/lib/ch/query.ex
@@ -94,7 +94,7 @@ defimpl DBConnection.Query, for: Ch.Query do
     cond do
       decode and format == "RowBinaryWithNamesAndTypes" ->
         rows = data |> IO.iodata_to_binary() |> RowBinary.decode_rows()
-        %Result{num_rows: length(rows), rows: rows, headers: headers, command: command}
+        %Result{num_rows: length(rows), rows: rows, data: data, command: command}
 
       format == nil ->
         num_rows =
@@ -103,10 +103,10 @@ defimpl DBConnection.Query, for: Ch.Query do
             String.to_integer(written_rows)
           end
 
-        %Result{num_rows: num_rows, headers: headers, command: command}
+        %Result{num_rows: num_rows, data: data, command: command}
 
       true ->
-        %Result{rows: data, headers: headers, command: command}
+        %Result{data: data, command: command}
     end
   end
 
diff --git a/lib/ch/result.ex b/lib/ch/result.ex
index ddca4df..2977aa9 100644
--- a/lib/ch/result.ex
+++ b/lib/ch/result.ex
@@ -2,21 +2,19 @@ defmodule Ch.Result do
   @moduledoc """
   Result struct returned from any successful query. Its fields are:
 
-    * `command` - An atom of the query command, for example: `:select`, `:insert`;
-    * `rows` - The result set. One of:
-      - a list of lists, each inner list corresponding to a
-        row, each element in the inner list corresponds to a column;
-      - raw iodata when the response is not automatically decoded, e.g. `x-clickhouse-format: CSV`
-    * `num_rows` - The number of fetched or affected rows;
-    * `headers` - The HTTP response headers
+    * `command` - An atom of the query command, for example: `:select`, `:insert`
+    * `num_rows` - The number of fetched or affected rows
+    * `rows` - A list of lists, each inner list corresponding to a row and each element in the inner list to a column
+    * `data` - The raw iodata from the response
+
   """
 
-  defstruct [:command, :num_rows, :rows, :headers]
+  defstruct [:command, :num_rows, :rows, :data]
 
   @type t :: %__MODULE__{
-          command: atom,
+          command: Ch.Query.command() | nil,
           num_rows: non_neg_integer | nil,
-          rows: [[term]] | iodata | nil,
-          headers: Mint.Types.headers()
+          rows: [[term]] | nil,
+          data: iodata
         }
 end
diff --git a/test/ch/connection_test.exs b/test/ch/connection_test.exs
index 89f1140..aa99a4a 100644
--- a/test/ch/connection_test.exs
+++ b/test/ch/connection_test.exs
@@ -47,12 +47,12 @@ defmodule Ch.ConnectionTest do
       # datetimes in params are sent in text and ClickHouse translates them to UTC from server timezone by default
       # see https://clickhouse.com/docs/en/sql-reference/data-types/datetime
       # https://kb.altinity.com/altinity-kb-queries-and-syntax/time-zones/
-      assert {:ok, %{num_rows: 1, rows: [[naive_datetime]], headers: headers}} =
+      assert {:ok, %{num_rows: 1, rows: [[naive_datetime]]}} =
               Ch.query(conn, "select {naive:DateTime}", %{"naive" => naive_noon})
 
       # to make this test pass for contributors with non UTC timezone we perform the same steps as ClickHouse
       # i.e. we give server timezone to the naive datetime and shift it to UTC before comparing with the result
-      {_, timezone} = List.keyfind!(headers, "x-clickhouse-timezone", 0)
+      %Ch.Result{rows: [[timezone]]} = Ch.query!(conn, "select timezone()")
 
       assert naive_datetime ==
               naive_noon
@@ -720,8 +720,7 @@ defmodule Ch.ConnectionTest do
       # datetimes in params are sent in text and ClickHouse translates them to UTC from server timezone by default
      # see https://clickhouse.com/docs/en/sql-reference/data-types/datetime
       # https://kb.altinity.com/altinity-kb-queries-and-syntax/time-zones/
-      assert {:ok,
-              %{num_rows: 1, rows: [[naive_datetime, "2022-12-12 12:00:00"]], headers: headers}} =
+      assert {:ok, %{num_rows: 1, rows: [[naive_datetime, "2022-12-12 12:00:00"]]}} =
               Ch.query(
                 conn,
                 "select {naive:DateTime} as d, toString(d)",
@@ -730,7 +729,7 @@ defmodule Ch.ConnectionTest do
 
       # to make this test pass for contributors with non UTC timezone we perform the same steps as ClickHouse
       # i.e. we give server timezone to the naive datetime and shift it to UTC before comparing with the result
-      {_, timezone} = List.keyfind!(headers, "x-clickhouse-timezone", 0)
+      %Ch.Result{rows: [[timezone]]} = Ch.query!(conn, "select timezone()")
 
       assert naive_datetime ==
               naive_noon
@@ -910,7 +909,7 @@ defmodule Ch.ConnectionTest do
       # datetimes in params are sent in text and ClickHouse translates them to UTC from server timezone by default
       # see https://clickhouse.com/docs/en/sql-reference/data-types/datetime
       # https://kb.altinity.com/altinity-kb-queries-and-syntax/time-zones/
-      assert {:ok, %{num_rows: 1, rows: [[naive_datetime]], headers: headers}} =
+      assert {:ok, %{num_rows: 1, rows: [[naive_datetime]]}} =
               Ch.query(
                 conn,
                 "select {naive:DateTime64(#{precision})}",
@@ -919,7 +918,7 @@ defmodule Ch.ConnectionTest do
 
       # to make this test pass for contributors with non UTC timezone we perform the same steps as ClickHouse
       # i.e. we give server timezone to the naive datetime and shift it to UTC before comparing with the result
-      {_, timezone} = List.keyfind!(headers, "x-clickhouse-timezone", 0)
+      %Ch.Result{rows: [[timezone]]} = Ch.query!(conn, "select timezone()")
 
       expected =
         naive_noon
@@ -1066,12 +1065,11 @@ defmodule Ch.ConnectionTest do
               [{20.0, 20.0}, "Point"]
             ]
 
-    # to make our RowBinary is not garbage in garbage out we also test a text format response
-    assert conn
-           |> Ch.query!(
+    # to make sure our RowBinary is not "garbage in, garbage out" we also test a "text format" response
+    assert Ch.query!(
+             conn,
              "SELECT p, toTypeName(p) FROM geo_point ORDER BY p ASC FORMAT JSONCompact"
-           )
-           |> Map.fetch!(:rows)
+           ).data
            |> Jason.decode!()
            |> Map.fetch!("data") == [
              [[10, 10], "Point"],
@@ -1108,7 +1106,7 @@ defmodule Ch.ConnectionTest do
     assert Ch.query!(
              conn,
              "SELECT r, toTypeName(r) FROM geo_ring ORDER BY r ASC FORMAT JSONCompact"
-           ).rows
+           ).data
            |> Jason.decode!()
            |> Map.fetch!("data") == [
              [[[0, 0], [10, 0], [10, 10], [0, 10]], "Ring"],
@@ -1160,7 +1158,7 @@ defmodule Ch.ConnectionTest do
     assert Ch.query!(
              conn,
              "SELECT pg, toTypeName(pg) FROM geo_polygon ORDER BY pg ASC FORMAT JSONCompact"
-           ).rows
+           ).data
            |> Jason.decode!()
            |> Map.fetch!("data") == [
              [[[[0, 1], [10, 3.2]], [], [[2, 2]]], "Polygon"],
@@ -1242,7 +1240,7 @@ defmodule Ch.ConnectionTest do
     assert Ch.query!(
              conn,
              "SELECT mpg, toTypeName(mpg) FROM geo_multipolygon ORDER BY mpg ASC FORMAT JSONCompact"
-           ).rows
+           ).data
            |> Jason.decode!()
            |> Map.fetch!("data") == [
              [
diff --git a/test/ch/faults_test.exs b/test/ch/faults_test.exs
index 264612a..bdf8d34 100644
--- a/test/ch/faults_test.exs
+++ b/test/ch/faults_test.exs
@@ -468,8 +468,7 @@ defmodule Ch.FaultsTest do
      test = self()
       header = "X-ClickHouse-Server-Display-Name"
 
-      {:ok, %Result{headers: headers}} = Ch.Test.sql_exec("select 1")
-      {_, expected_name} = List.keyfind!(headers, String.downcase(header), 0)
+      {:ok, %Result{rows: [[expected_name]]}} = Ch.Test.sql_exec("select hostName()")
 
       log =
         capture_async_log(fn ->
diff --git a/test/ch/headers_test.exs b/test/ch/headers_test.exs
index 52aa7c3..18c8b98 100644
--- a/test/ch/headers_test.exs
+++ b/test/ch/headers_test.exs
@@ -7,50 +7,38 @@ defmodule Ch.HeadersTest do
   end
 
   test "can request gzipped response through headers", %{conn: conn} do
-    assert {:ok, %{rows: rows, headers: headers}} =
+    assert {:ok, %{data: data}} =
             Ch.query(conn, "select number from system.numbers limit 100", [],
               decode: false,
               settings: [enable_http_compression: 1],
               headers: [{"accept-encoding", "gzip"}]
             )
 
-    assert :proplists.get_value("content-type", headers) == "application/octet-stream"
-    assert :proplists.get_value("content-encoding", headers) == "gzip"
-    assert :proplists.get_value("x-clickhouse-format", headers) == "RowBinaryWithNamesAndTypes"
-
     # https://en.wikipedia.org/wiki/Gzip
-    assert <<0x1F, 0x8B, _rest::bytes>> = IO.iodata_to_binary(rows)
+    assert <<0x1F, 0x8B, _rest::bytes>> = IO.iodata_to_binary(data)
   end
 
   test "can request lz4 response through headers", %{conn: conn} do
-    assert {:ok, %{rows: rows, headers: headers}} =
+    assert {:ok, %{data: data}} =
             Ch.query(conn, "select number from system.numbers limit 100", [],
               decode: false,
               settings: [enable_http_compression: 1],
               headers: [{"accept-encoding", "lz4"}]
             )
 
-    assert :proplists.get_value("content-type", headers) == "application/octet-stream"
-    assert :proplists.get_value("content-encoding", headers) == "lz4"
-    assert :proplists.get_value("x-clickhouse-format", headers) == "RowBinaryWithNamesAndTypes"
-
     # https://en.wikipedia.org/wiki/LZ4_(compression_algorithm)
-    assert <<0x04, 0x22, 0x4D, 0x18, _rest::bytes>> = IO.iodata_to_binary(rows)
+    assert <<0x04, 0x22, 0x4D, 0x18, _rest::bytes>> = IO.iodata_to_binary(data)
   end
 
   test "can request zstd response through headers", %{conn: conn} do
-    assert {:ok, %{rows: rows, headers: headers}} =
+    assert {:ok, %{data: data}} =
             Ch.query(conn, "select number from system.numbers limit 100", [],
               decode: false,
               settings: [enable_http_compression: 1],
               headers: [{"accept-encoding", "zstd"}]
             )
 
-    assert :proplists.get_value("content-type", headers) == "application/octet-stream"
-    assert :proplists.get_value("content-encoding", headers) == "zstd"
-    assert :proplists.get_value("x-clickhouse-format", headers) == "RowBinaryWithNamesAndTypes"
-
     # https://en.wikipedia.org/wiki/LZ4_(compression_algorithm)
-    assert <<0x28, 0xB5, 0x2F, 0xFD, _rest::bytes>> = IO.iodata_to_binary(rows)
+    assert <<0x28, 0xB5, 0x2F, 0xFD, _rest::bytes>> = IO.iodata_to_binary(data)
   end
 end
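For callers of the library, the migration this patch implies can be sketched as below. This is a minimal, hypothetical usage example and not part of the patch: the `Ch.start_link/1` options and the queries are assumptions chosen for illustration, while the field names (`:rows`, `:data`) and the `select timezone()` / `select hostName()` replacements mirror the changes above. Decoded results stay in `rows`, raw (undecoded or text-format) payloads move from `rows` to `data`, and metadata that used to be read from `x-clickhouse-*` response headers is now fetched with plain SQL. `Jason` is used only because the tests in this diff already decode JSONCompact payloads with it.

```elixir
# Minimal, hypothetical sketch of the new %Ch.Result{} usage (not part of the patch).
{:ok, conn} = Ch.start_link(database: "default")

# Decoded (RowBinaryWithNamesAndTypes) results still come back in :rows.
%Ch.Result{num_rows: 1, rows: [[2]]} = Ch.query!(conn, "SELECT 1 + 1")

# Raw payloads (decode: false, or an explicit text FORMAT) now live in :data instead of :rows.
%Ch.Result{rows: nil, data: data} =
  Ch.query!(conn, "SELECT number FROM system.numbers LIMIT 3 FORMAT JSONCompact")

decoded = data |> IO.iodata_to_binary() |> Jason.decode!() |> Map.fetch!("data")
IO.inspect(decoded, label: "JSONCompact rows")

# Server metadata previously read from response headers (x-clickhouse-timezone,
# X-ClickHouse-Server-Display-Name) is now queried directly, as the updated tests do.
%Ch.Result{rows: [[timezone]]} = Ch.query!(conn, "select timezone()")
IO.puts("server timezone: " <> timezone)
```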