Ch.Result: drop :headers, add :data
ruslandoga committed Dec 29, 2023
1 parent bfefa7e commit b3cbb76
Showing 6 changed files with 33 additions and 49 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -3,6 +3,7 @@
## Unreleased

- move rows payload (RowBinary, CSV, etc.) to SQL statement and remove pseudo-positional binds, making param names explicit https://github.com/plausible/ch/pull/143
+ - drop `:headers` from `%Ch.Result{}` but add `:data`

## 0.2.2 (2023-12-23)

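For callers that used to read response headers off the result, a minimal migration sketch (assuming a started connection `conn`; the replacement queries mirror the test changes in this commit):

```elixir
# Before: metadata came from the HTTP response headers carried on %Ch.Result{}
# {:ok, %Ch.Result{headers: headers}} = Ch.query(conn, "select 1")
# {_, timezone} = List.keyfind!(headers, "x-clickhouse-timezone", 0)

# After: ask the server directly; the result no longer carries headers
%Ch.Result{rows: [[timezone]]} = Ch.query!(conn, "select timezone()")
%Ch.Result{rows: [[hostname]]} = Ch.query!(conn, "select hostName()")
```
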
6 changes: 3 additions & 3 deletions lib/ch/query.ex
@@ -94,7 +94,7 @@ defimpl DBConnection.Query, for: Ch.Query do
cond do
decode and format == "RowBinaryWithNamesAndTypes" ->
rows = data |> IO.iodata_to_binary() |> RowBinary.decode_rows()
- %Result{num_rows: length(rows), rows: rows, headers: headers, command: command}
+ %Result{num_rows: length(rows), rows: rows, data: data, command: command}

format == nil ->
num_rows =
@@ -103,10 +103,10 @@ defimpl DBConnection.Query, for: Ch.Query do
String.to_integer(written_rows)
end

- %Result{num_rows: num_rows, headers: headers, command: command}
+ %Result{num_rows: num_rows, data: data, command: command}

true ->
- %Result{rows: data, headers: headers, command: command}
+ %Result{data: data, command: command}
end
end

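An illustrative sketch of the three result shapes this `cond` produces after the change (assuming a started connection `conn` and a hypothetical table `t`):

```elixir
# Decoded SELECT (default RowBinaryWithNamesAndTypes): rows are decoded,
# and the raw body is still kept in :data
%Ch.Result{command: :select, num_rows: 2, rows: [[0], [1]]} =
  Ch.query!(conn, "select number from system.numbers limit 2")

# No response format (e.g. INSERT): num_rows comes from the x-clickhouse-summary header
%Ch.Result{command: :insert, num_rows: 2, rows: nil} =
  Ch.query!(conn, "insert into t(x) values (1), (2)")

# Any other format (or decode: false): the body is passed through untouched in :data
%Ch.Result{rows: nil, data: data} =
  Ch.query!(conn, "select number from system.numbers limit 2 format CSV")

IO.iodata_to_binary(data)
#=> "0\n1\n" (expected CSV output)
```
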
20 changes: 9 additions & 11 deletions lib/ch/result.ex
@@ -2,21 +2,19 @@ defmodule Ch.Result do
@moduledoc """
Result struct returned from any successful query. Its fields are:
- * `command` - An atom of the query command, for example: `:select`, `:insert`;
- * `rows` - The result set. One of:
-     - a list of lists, each inner list corresponding to a
-       row, each element in the inner list corresponds to a column;
-     - raw iodata when the response is not automatically decoded, e.g. `x-clickhouse-format: CSV`
- * `num_rows` - The number of fetched or affected rows;
- * `headers` - The HTTP response headers
+ * `command` - An atom of the query command, for example: `:select`, `:insert`
+ * `num_rows` - The number of fetched or affected rows
+ * `rows` - A list of lists, each inner list corresponding to a row, each element in the inner list corresponds to a column
+ * `data` - The raw iodata from the response
"""

- defstruct [:command, :num_rows, :rows, :headers]
+ defstruct [:command, :num_rows, :rows, :data]

@type t :: %__MODULE__{
- command: atom,
+ command: Ch.Query.command() | nil,
num_rows: non_neg_integer | nil,
- rows: [[term]] | iodata | nil,
- headers: Mint.Types.headers()
+ rows: [[term]] | nil,
+ data: iodata
}
end
28 changes: 13 additions & 15 deletions test/ch/connection_test.exs
@@ -47,12 +47,12 @@ defmodule Ch.ConnectionTest do
# datetimes in params are sent in text and ClickHouse translates them to UTC from server timezone by default
# see https://clickhouse.com/docs/en/sql-reference/data-types/datetime
# https://kb.altinity.com/altinity-kb-queries-and-syntax/time-zones/
- assert {:ok, %{num_rows: 1, rows: [[naive_datetime]], headers: headers}} =
+ assert {:ok, %{num_rows: 1, rows: [[naive_datetime]]}} =
Ch.query(conn, "select {naive:DateTime}", %{"naive" => naive_noon})

# to make this test pass for contributors with non UTC timezone we perform the same steps as ClickHouse
# i.e. we give server timezone to the naive datetime and shift it to UTC before comparing with the result
- {_, timezone} = List.keyfind!(headers, "x-clickhouse-timezone", 0)
+ %Ch.Result{rows: [[timezone]]} = Ch.query!(conn, "select timezone()")

assert naive_datetime ==
naive_noon
@@ -720,8 +720,7 @@ defmodule Ch.ConnectionTest do
# datetimes in params are sent in text and ClickHouse translates them to UTC from server timezone by default
# see https://clickhouse.com/docs/en/sql-reference/data-types/datetime
# https://kb.altinity.com/altinity-kb-queries-and-syntax/time-zones/
- assert {:ok,
-   %{num_rows: 1, rows: [[naive_datetime, "2022-12-12 12:00:00"]], headers: headers}} =
+ assert {:ok, %{num_rows: 1, rows: [[naive_datetime, "2022-12-12 12:00:00"]]}} =
Ch.query(
conn,
"select {naive:DateTime} as d, toString(d)",
@@ -730,7 +729,7 @@

# to make this test pass for contributors with non UTC timezone we perform the same steps as ClickHouse
# i.e. we give server timezone to the naive datetime and shift it to UTC before comparing with the result
- {_, timezone} = List.keyfind!(headers, "x-clickhouse-timezone", 0)
+ %Ch.Result{rows: [[timezone]]} = Ch.query!(conn, "select timezone()")

assert naive_datetime ==
naive_noon
@@ -910,7 +909,7 @@ defmodule Ch.ConnectionTest do
# datetimes in params are sent in text and ClickHouse translates them to UTC from server timezone by default
# see https://clickhouse.com/docs/en/sql-reference/data-types/datetime
# https://kb.altinity.com/altinity-kb-queries-and-syntax/time-zones/
- assert {:ok, %{num_rows: 1, rows: [[naive_datetime]], headers: headers}} =
+ assert {:ok, %{num_rows: 1, rows: [[naive_datetime]]}} =
Ch.query(
conn,
"select {naive:DateTime64(#{precision})}",
@@ -919,7 +918,7 @@

# to make this test pass for contributors with non UTC timezone we perform the same steps as ClickHouse
# i.e. we give server timezone to the naive datetime and shift it to UTC before comparing with the result
- {_, timezone} = List.keyfind!(headers, "x-clickhouse-timezone", 0)
+ %Ch.Result{rows: [[timezone]]} = Ch.query!(conn, "select timezone()")

expected =
naive_noon
@@ -1066,12 +1065,11 @@ defmodule Ch.ConnectionTest do
[{20.0, 20.0}, "Point"]
]

- # to make our RowBinary is not garbage in garbage out we also test a text format response
- assert conn
- |> Ch.query!(
+ # to make sure our RowBinary is not "garbage in, garbage out" we also test a "text format" response
+ assert Ch.query!(
+   conn,
"SELECT p, toTypeName(p) FROM geo_point ORDER BY p ASC FORMAT JSONCompact"
- )
- |> Map.fetch!(:rows)
+ ).data
|> Jason.decode!()
|> Map.fetch!("data") == [
[[10, 10], "Point"],
@@ -1108,7 +1106,7 @@
assert Ch.query!(
conn,
"SELECT r, toTypeName(r) FROM geo_ring ORDER BY r ASC FORMAT JSONCompact"
- ).rows
+ ).data
|> Jason.decode!()
|> Map.fetch!("data") == [
[[[0, 0], [10, 0], [10, 10], [0, 10]], "Ring"],
@@ -1160,7 +1158,7 @@
assert Ch.query!(
conn,
"SELECT pg, toTypeName(pg) FROM geo_polygon ORDER BY pg ASC FORMAT JSONCompact"
- ).rows
+ ).data
|> Jason.decode!()
|> Map.fetch!("data") == [
[[[[0, 1], [10, 3.2]], [], [[2, 2]]], "Polygon"],
@@ -1242,7 +1240,7 @@
assert Ch.query!(
conn,
"SELECT mpg, toTypeName(mpg) FROM geo_multipolygon ORDER BY mpg ASC FORMAT JSONCompact"
- ).rows
+ ).data
|> Jason.decode!()
|> Map.fetch!("data") == [
[
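The same pattern in isolation: any non-RowBinary `FORMAT` now comes back raw in `:data`, so JSON output is decoded by hand (a sketch, reusing `Jason` as the tests above do; `toUInt8` avoids ClickHouse's quoting of 64-bit integers in JSON formats):

```elixir
result = Ch.query!(conn, "SELECT toUInt8(number) FROM system.numbers LIMIT 2 FORMAT JSONCompact")

result.data
|> Jason.decode!()
|> Map.fetch!("data")
#=> [[0], [1]]
```
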
3 changes: 1 addition & 2 deletions test/ch/faults_test.exs
@@ -468,8 +468,7 @@ defmodule Ch.FaultsTest do
test = self()

header = "X-ClickHouse-Server-Display-Name"
- {:ok, %Result{headers: headers}} = Ch.Test.sql_exec("select 1")
- {_, expected_name} = List.keyfind!(headers, String.downcase(header), 0)
+ {:ok, %Result{rows: [[expected_name]]}} = Ch.Test.sql_exec("select hostName()")

log =
capture_async_log(fn ->
24 changes: 6 additions & 18 deletions test/ch/headers_test.exs
@@ -7,50 +7,38 @@ defmodule Ch.HeadersTest do
end

test "can request gzipped response through headers", %{conn: conn} do
- assert {:ok, %{rows: rows, headers: headers}} =
+ assert {:ok, %{data: data}} =
Ch.query(conn, "select number from system.numbers limit 100", [],
decode: false,
settings: [enable_http_compression: 1],
headers: [{"accept-encoding", "gzip"}]
)

assert :proplists.get_value("content-type", headers) == "application/octet-stream"
assert :proplists.get_value("content-encoding", headers) == "gzip"
assert :proplists.get_value("x-clickhouse-format", headers) == "RowBinaryWithNamesAndTypes"

# https://en.wikipedia.org/wiki/Gzip
- assert <<0x1F, 0x8B, _rest::bytes>> = IO.iodata_to_binary(rows)
+ assert <<0x1F, 0x8B, _rest::bytes>> = IO.iodata_to_binary(data)
end

test "can request lz4 response through headers", %{conn: conn} do
- assert {:ok, %{rows: rows, headers: headers}} =
+ assert {:ok, %{data: data}} =
Ch.query(conn, "select number from system.numbers limit 100", [],
decode: false,
settings: [enable_http_compression: 1],
headers: [{"accept-encoding", "lz4"}]
)

- assert :proplists.get_value("content-type", headers) == "application/octet-stream"
- assert :proplists.get_value("content-encoding", headers) == "lz4"
- assert :proplists.get_value("x-clickhouse-format", headers) == "RowBinaryWithNamesAndTypes"

# https://en.wikipedia.org/wiki/LZ4_(compression_algorithm)
- assert <<0x04, 0x22, 0x4D, 0x18, _rest::bytes>> = IO.iodata_to_binary(rows)
+ assert <<0x04, 0x22, 0x4D, 0x18, _rest::bytes>> = IO.iodata_to_binary(data)
end

test "can request zstd response through headers", %{conn: conn} do
- assert {:ok, %{rows: rows, headers: headers}} =
+ assert {:ok, %{data: data}} =
Ch.query(conn, "select number from system.numbers limit 100", [],
decode: false,
settings: [enable_http_compression: 1],
headers: [{"accept-encoding", "zstd"}]
)

- assert :proplists.get_value("content-type", headers) == "application/octet-stream"
- assert :proplists.get_value("content-encoding", headers) == "zstd"
- assert :proplists.get_value("x-clickhouse-format", headers) == "RowBinaryWithNamesAndTypes"

# https://en.wikipedia.org/wiki/LZ4_(compression_algorithm)
- assert <<0x28, 0xB5, 0x2F, 0xFD, _rest::bytes>> = IO.iodata_to_binary(rows)
+ assert <<0x28, 0xB5, 0x2F, 0xFD, _rest::bytes>> = IO.iodata_to_binary(data)
end
end
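
Working with the compressed `:data` from these tests, a hedged sketch: `:zlib.gunzip/1` is Erlang/OTP, the decompressed payload is `RowBinaryWithNamesAndTypes`, and `Ch.RowBinary.decode_rows/1` (assuming that is the `RowBinary` aliased in `lib/ch/query.ex` above) should turn it back into rows:

```elixir
{:ok, %Ch.Result{data: data}} =
  Ch.query(conn, "select number from system.numbers limit 3", [],
    decode: false,
    settings: [enable_http_compression: 1],
    headers: [{"accept-encoding", "gzip"}]
  )

rows =
  data
  |> IO.iodata_to_binary()
  # strip the gzip framing first; the body starts with the 0x1F 0x8B magic bytes
  |> :zlib.gunzip()
  # then decode the RowBinaryWithNamesAndTypes payload
  |> Ch.RowBinary.decode_rows()

rows
#=> [[0], [1], [2]] (expected)
```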
