Commit 3992643
zy-kkk committed Nov 21, 2024 (1 parent 3352d84)
Showing 8 changed files with 275 additions and 3 deletions.
@@ -263,3 +263,58 @@ CREATE TABLE catalog_pg_test.num_zero (
id varchar(20) NULL,
num numeric NULL
);

CREATE TABLE catalog_pg_test.test_all_types_array (
id int4 NOT NULL,
char_value _bpchar NULL,
varchar_value _varchar NULL,
date_value _date NULL,
smallint_value _int2 NULL,
int_value _int4 NULL,
bigint_value _int8 NULL,
timestamp_value _timestamp NULL,
decimal_value _numeric NULL,
bit_value _bit NULL,
real_value _float4 NULL,
cidr_value _cidr NULL,
inet_value _inet NULL,
macaddr_value _macaddr NULL,
bitn_value _bit NULL,
bitnv_value _varbit NULL,
jsonb_value _jsonb NULL,
point_value _point NULL,
line_value _line NULL,
lseg_value _lseg NULL,
box_value _box NULL,
path_value _path NULL,
polygon_value _polygon NULL,
circle_value _circle NULL
);


CREATE TABLE catalog_pg_test.test_all_types_2d_array (
id int4 NOT NULL,
char_value _bpchar NULL,
varchar_value _varchar NULL,
date_value _date NULL,
smallint_value _int2 NULL,
int_value _int4 NULL,
bigint_value _int8 NULL,
timestamp_value _timestamp NULL,
decimal_value _numeric NULL,
bit_value _bit NULL,
real_value _float4 NULL,
cidr_value _cidr NULL,
inet_value _inet NULL,
macaddr_value _macaddr NULL,
bitn_value _bit NULL,
bitnv_value _varbit NULL,
jsonb_value _jsonb NULL,
point_value _point NULL,
line_value _line NULL,
lseg_value _lseg NULL,
box_value _box NULL,
path_value _path NULL,
polygon_value _polygon NULL,
circle_value _circle NULL
);
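Note: the leading underscore in these column types is PostgreSQL's internal naming for array types, so _int4 is the array type of int4 (the same column could be declared as int4[]). PostgreSQL does not encode dimensionality in the type itself, which is why the 1D and 2D tables use identical type names and why JdbcPostgreSQLClient below has to query array_ndims() against the data to discover the number of dimensions.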
docker/thirdparties/docker-compose/postgresql/init/04-insert.sql (69 additions, 0 deletions)
@@ -2730,3 +2730,72 @@ values('1','A','2020-09-02 00:00:00')
INSERT INTO catalog_pg_test.num_zero (id, num) VALUES ('001', 123.45);
INSERT INTO catalog_pg_test.num_zero (id, num) VALUES ('002', 678.90);
INSERT INTO catalog_pg_test.num_zero (id, num) VALUES ('003', 1011.12);

INSERT INTO catalog_pg_test.test_all_types_2d_array VALUES
(1,
ARRAY[ARRAY['abc', 'def']::text[], ARRAY['ghi', 'jkl']::text[]],
ARRAY[ARRAY['abc', 'def']::text[], ARRAY['ghi', 'jkl']::text[]],
ARRAY[ARRAY['2022-10-11'::date, '2022-11-12'::date], ARRAY['2023-01-01'::date, '2023-02-02'::date]],
ARRAY[ARRAY[1, 2], ARRAY[3, 4]],
ARRAY[ARRAY[1, 2], ARRAY[2, 3]],
ARRAY[ARRAY[3, 4], ARRAY[4, 5]],
ARRAY[ARRAY['2022-10-22 10:59:59'::timestamp, '2023-01-01 12:00:00'::timestamp],
ARRAY['2023-02-01 14:30:00'::timestamp, '2023-02-10 16:45:00'::timestamp]],
ARRAY[ARRAY[34.123, 45.678], ARRAY[56.789, 67.890]],
ARRAY[ARRAY[0::bit, 1::bit], ARRAY[1::bit, 0::bit]],
ARRAY[ARRAY[12.123456, 34.567890], ARRAY[56.789123, 45.678345]],
ARRAY[ARRAY['10.16.10.14'::cidr, '192.168.1.1'::cidr],
ARRAY['172.16.0.1'::cidr, '192.168.2.2'::cidr]],
ARRAY[ARRAY['10.16.10.14'::inet, '192.168.1.1'::inet],
ARRAY['172.16.0.1'::inet, '192.168.2.2'::inet]],
ARRAY[ARRAY['ff:ff:ff:ff:ff:AA'::macaddr, '00:14:22:01:23:45'::macaddr],
ARRAY['11:22:33:44:55:66'::macaddr, '77:88:99:00:11:22'::macaddr]],
ARRAY[ARRAY['1010101010'::bit(10), '1100101010'::bit(10)],
ARRAY['1110001111'::bit(10), '0001110000'::bit(10)]],
ARRAY[ARRAY[cast(10 as bit(5)), cast(20 as bit(5))],
ARRAY[cast(30 as bit(5)), cast(40 as bit(5))]],
ARRAY[ARRAY['{"id":1}'::jsonb, '{"id":2}'::jsonb],
ARRAY['{"id":3}'::jsonb, '{"id":4}'::jsonb]],
ARRAY[ARRAY['(1,1)'::point, '(2,2)'::point],
ARRAY['(3,3)'::point, '(4,4)'::point]],
ARRAY[ARRAY['{1,1,1}'::line, '{2,2,2}'::line],
ARRAY['{3,3,3}'::line, '{4,4,4}'::line]],
ARRAY[ARRAY['(1,1),(2,2)'::lseg, '(3,3),(4,4)'::lseg],
ARRAY['(5,5),(6,6)'::lseg, '(7,7),(8,8)'::lseg]],
ARRAY[ARRAY['(1,1),(2,2)'::box, '(5,5),(6,6)'::box],
ARRAY['(7,7),(8,8)'::box, '(9,9),(10,10)'::box]],
ARRAY[ARRAY['(1,1),(2,2),(2,1)'::path, '(3,3),(4,4),(5,5)'::path],
ARRAY['(6,6),(7,7),(8,8)'::path, '(9,9),(10,10),(11,11)'::path]],
ARRAY[ARRAY['((1,1),(2,2),(2,1))'::polygon, '((3,3),(4,4),(5,5))'::polygon],
ARRAY['((6,6),(7,7),(8,8))'::polygon, '((9,9),(10,10),(11,11))'::polygon]],
ARRAY[ARRAY['<(0,0),1>'::circle, '<(5,5),10>'::circle],
ARRAY['<(2,2),3>'::circle, '<(7,7),14>'::circle]]
);

INSERT INTO catalog_pg_test.test_all_types_array VALUES
(1,
ARRAY['abc', 'def'],
ARRAY['ghi', 'jkl'],
ARRAY['2022-10-11'::date, '2022-11-12'::date],
ARRAY[1, 2],
ARRAY[2, 3],
ARRAY[3, 4],
ARRAY['2022-10-22 10:59:59'::timestamp, '2023-01-01 12:00:00'::timestamp],
ARRAY[34.123, 45.678],
ARRAY[0::bit, 1::bit],
ARRAY[12.123456, 34.567890],
ARRAY['10.16.10.14'::cidr, '192.168.1.1'::cidr],
ARRAY['10.16.10.14'::inet, '192.168.0.1'::inet],
ARRAY['ff:ff:ff:ff:ff:AA'::macaddr, '00:14:22:01:23:45'::macaddr],
ARRAY['1010101010'::bit(10), '1100101010'::bit(10)],
ARRAY[cast('00010' as bit(5)), cast('10100' as bit(5))], -- bit(5) values need 5 bits
ARRAY['{"id":1}'::jsonb, '{"id":2}'::jsonb],
ARRAY['(1,1)'::point, '(2,2)'::point],
ARRAY['{1,1,1}'::line, '{2,2,2}'::line],
ARRAY['(1,1),(2,2)'::lseg, '(3,3),(4,4)'::lseg],
ARRAY['(1,1),(2,2)'::box, '(5,5),(6,6)'::box],
ARRAY['(1,1),(2,2),(2,1)'::path, '(3,3),(4,4),(5,5)'::path],
ARRAY['((1,1),(2,2),(2,1))'::polygon, '((3,3),(4,4),(5,5))'::polygon],
ARRAY['<(0,0),1>'::circle, '<(5,5),10>'::circle]
);

@@ -22,14 +22,19 @@
import org.apache.doris.common.jni.vec.ColumnValueConverter;
import org.apache.doris.common.jni.vec.VectorTable;

import org.apache.log4j.Logger;

import java.math.BigDecimal;
import java.sql.Array;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.OffsetDateTime;

public class PostgreSQLJdbcExecutor extends BaseJdbcExecutor {
private static final Logger LOG = Logger.getLogger(PostgreSQLJdbcExecutor.class);

public PostgreSQLJdbcExecutor(byte[] thriftParams) throws Exception {
super(thriftParams);
}
@@ -106,6 +111,15 @@ protected ColumnValueConverter getOutputConverter(ColumnType columnType, String
return timeToString((java.sql.Time) input);
} else if (input instanceof byte[]) {
return pgByteArrayToHexString((byte[]) input);
} else if (input instanceof Array) {
try {
Array pgArray = (Array) input;
Object array = pgArray.getArray();
return handleNestedArray(array);
} catch (SQLException e) {
LOG.warn("Failed to convert pg array to string, so return null", e);
return null;
}
} else {
return input.toString();
}
@@ -122,4 +136,29 @@ private static String pgByteArrayToHexString(byte[] bytes) {
}
return hexString.toString();
}

private String handleNestedArray(Object array) {
if (array == null) {
return null;
}
StringBuilder sb = new StringBuilder();
sb.append("[");
int length = java.lang.reflect.Array.getLength(array);
for (int i = 0; i < length; i++) {
Object element = java.lang.reflect.Array.get(array, i);
if (element != null && element.getClass().isArray()) {
sb.append(handleNestedArray(element));
} else if (element instanceof byte[]) {
sb.append(pgByteArrayToHexString((byte[]) element));
} else {
String elementStr = element != null ? element.toString().trim() : "null";
sb.append(elementStr);
}
if (i < length - 1) {
sb.append(", ");
}
}
sb.append("]");
return sb.toString();
}
}
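For context, a minimal standalone sketch of the rendering behavior added above (the class and method names here are illustrative, not part of the commit): java.sql.Array#getArray() returns a possibly nested Java array, and handleNestedArray walks it with reflection to produce the bracketed text form the regression output below expects.

public final class NestedArrayRenderSketch {
    // Mirrors handleNestedArray: recurse into nested dimensions, join elements with ", ",
    // and wrap every dimension in square brackets.
    static String render(Object array) {
        if (array == null) {
            return null;
        }
        StringBuilder sb = new StringBuilder("[");
        int length = java.lang.reflect.Array.getLength(array);
        for (int i = 0; i < length; i++) {
            Object element = java.lang.reflect.Array.get(array, i);
            if (element != null && element.getClass().isArray()) {
                sb.append(render(element));
            } else {
                sb.append(element == null ? "null" : element.toString().trim());
            }
            if (i < length - 1) {
                sb.append(", ");
            }
        }
        return sb.append("]").toString();
    }

    public static void main(String[] args) {
        Integer[][] sample = {{1, 2}, {3, 4}};
        System.out.println(render(sample)); // prints [[1, 2], [3, 4]]
    }
}

The committed version additionally routes byte[] elements through pgByteArrayToHexString; that branch is omitted here for brevity.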
@@ -164,7 +164,7 @@ public class Config extends ConfigBase {
"MySQL Jdbc Catalog mysql does not support pushdown functions"})
public static String[] jdbc_mysql_unsupported_pushdown_functions = {"date_trunc", "money_format", "negative"};

- @ConfField(description = {"强制 SQLServer Jdbc Catalog 加密为 false",
@ConfField(mutable = true, masterOnly = true, description = {"强制 SQLServer Jdbc Catalog 加密为 false",
"Force SQLServer Jdbc Catalog encrypt to false"})
public static boolean force_sqlserver_jdbc_encrypt_false = false;

@@ -17,17 +17,68 @@

package org.apache.doris.datasource.jdbc.client;

import org.apache.doris.catalog.ArrayType;
import org.apache.doris.catalog.PrimitiveType;
import org.apache.doris.catalog.ScalarType;
import org.apache.doris.catalog.Type;
import org.apache.doris.common.util.Util;
import org.apache.doris.datasource.jdbc.util.JdbcFieldSchema;

import com.google.common.collect.Lists;

import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Types;
import java.util.List;
import java.util.Optional;

public class JdbcPostgreSQLClient extends JdbcClient {

protected JdbcPostgreSQLClient(JdbcClientConfig jdbcClientConfig) {
super(jdbcClientConfig);
}

@Override
public List<JdbcFieldSchema> getJdbcColumnsInfo(String localDbName, String localTableName) {
Connection conn = null;
ResultSet rs = null;
List<JdbcFieldSchema> tableSchema = Lists.newArrayList();
String remoteDbName = getRemoteDatabaseName(localDbName);
String remoteTableName = getRemoteTableName(localDbName, localTableName);
try {
conn = getConnection();
DatabaseMetaData databaseMetaData = conn.getMetaData();
String catalogName = getCatalogName(conn);
rs = getRemoteColumns(databaseMetaData, catalogName, remoteDbName, remoteTableName);
while (rs.next()) {
int dataType = rs.getInt("DATA_TYPE");
int arrayDimensions = 0;
if (dataType == Types.ARRAY) {
String columnName = rs.getString("COLUMN_NAME");
try (PreparedStatement pstmt = conn.prepareStatement(
String.format("SELECT array_ndims(%s) FROM %s.%s LIMIT 1",
columnName, remoteDbName, remoteTableName))) {
try (ResultSet arrayRs = pstmt.executeQuery()) {
if (arrayRs.next()) {
arrayDimensions = arrayRs.getInt(1);
}
}
}
}
tableSchema.add(new JdbcFieldSchema(rs, arrayDimensions));
}
} catch (SQLException e) {
throw new JdbcClientException("failed to get jdbc columns info for remote table `%s.%s`: %s",
remoteDbName, remoteTableName, Util.getRootCauseMessage(e));
} finally {
close(rs, conn);
}
return tableSchema;
}

@Override
protected String[] getTableTypes() {
return new String[] {"TABLE", "PARTITIONED TABLE", "VIEW", "MATERIALIZED VIEW", "FOREIGN TABLE"};
@@ -99,8 +150,25 @@ protected Type jdbcTypeToDoris(JdbcFieldSchema fieldSchema) {
case "json":
case "jsonb":
return ScalarType.createStringType();
- default:
- return Type.UNSUPPORTED;
default: {
if (fieldSchema.getDataType() == Types.ARRAY && pgType.startsWith("_")) {
int arrayDimensions = fieldSchema.getArrayDimensions().orElse(0);
if (arrayDimensions == 0 || pgType.equals("_bytea")) {
return Type.UNSUPPORTED;
}
String innerType = pgType.substring(1);
JdbcFieldSchema innerFieldSchema = new JdbcFieldSchema(fieldSchema);
innerFieldSchema.setDataTypeName(Optional.of(innerType));
Type arrayInnerType = jdbcTypeToDoris(innerFieldSchema);
Type arrayType = ArrayType.create(arrayInnerType, true);
for (int i = 1; i < arrayDimensions; i++) {
arrayType = ArrayType.create(arrayType, true);
}
return arrayType;
} else {
return Type.UNSUPPORTED;
}
}
}
}
}
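A rough sketch of the resulting type mapping (illustrative only; plain strings stand in for Doris' ScalarType/ArrayType objects): the dimension count obtained from array_ndims() wraps the element type once per dimension, so a _int4 column with two dimensions maps to ARRAY<ARRAY<INT>>, mirroring the loop at the end of jdbcTypeToDoris.

public final class ArrayDimensionMappingSketch {
    // One wrap per dimension: the first wrap corresponds to ArrayType.create(innerType, true),
    // the remaining wraps to the for-loop over arrayDimensions.
    static String nest(String elementType, int dimensions) {
        String type = elementType;
        for (int i = 0; i < dimensions; i++) {
            type = "ARRAY<" + type + ">";
        }
        return type;
    }

    public static void main(String[] args) {
        System.out.println(nest("INT", 1)); // ARRAY<INT>
        System.out.println(nest("INT", 2)); // ARRAY<ARRAY<INT>>
    }
}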
@@ -37,6 +37,7 @@ public class JdbcFieldSchema {
// For NUMERIC/DECIMAL, columnSize means precision.
protected Optional<Integer> columnSize;
protected Optional<Integer> decimalDigits;
protected Optional<Integer> arrayDimensions;
// Base number (usually 10 or 2)
protected int numPrecRadix;
// column description
@@ -53,6 +54,7 @@ public JdbcFieldSchema(JdbcFieldSchema other) {
this.dataTypeName = other.dataTypeName;
this.columnSize = other.columnSize;
this.decimalDigits = other.decimalDigits;
this.arrayDimensions = other.arrayDimensions;
this.numPrecRadix = other.numPrecRadix;
this.remarks = other.remarks;
this.charOctetLength = other.charOctetLength;
@@ -71,6 +73,19 @@ public JdbcFieldSchema(ResultSet rs) throws SQLException {
this.charOctetLength = rs.getInt("CHAR_OCTET_LENGTH");
}

public JdbcFieldSchema(ResultSet rs, int arrayDimensions) throws SQLException {
this.columnName = rs.getString("COLUMN_NAME");
this.dataType = getInteger(rs, "DATA_TYPE").orElseThrow(() -> new IllegalStateException("DATA_TYPE is null"));
this.dataTypeName = Optional.ofNullable(rs.getString("TYPE_NAME"));
this.columnSize = getInteger(rs, "COLUMN_SIZE");
this.decimalDigits = getInteger(rs, "DECIMAL_DIGITS");
this.numPrecRadix = rs.getInt("NUM_PREC_RADIX");
this.isAllowNull = rs.getInt("NULLABLE") != DatabaseMetaData.columnNoNulls;
this.remarks = rs.getString("REMARKS");
this.charOctetLength = rs.getInt("CHAR_OCTET_LENGTH");
this.arrayDimensions = Optional.of(arrayDimensions);
}

public JdbcFieldSchema(ResultSet rs, Map<String, String> dataTypeOverrides) throws SQLException {
this.columnName = rs.getString("COLUMN_NAME");
this.dataType = getInteger(rs, "DATA_TYPE").orElseThrow(() -> new IllegalStateException("DATA_TYPE is null"));
@@ -2234,6 +2234,18 @@ doris3 20
1 abc def 2022-10-11 1 2 3 2022-10-22T10:59:59 34.123 false 12.123456 10.16.10.14/32 10.16.10.14 ff:ff:ff:ff:ff:aa 1010101010 01010 1 {"id": 1} (1.0,1.0) {1.0,1.0,1.0} [(1.0,1.0),(2.0,2.0)] (2.0,2.0),(1.0,1.0) ((1.0,1.0),(2.0,2.0),(2.0,1.0)) ((1.0,1.0),(2.0,2.0),(2.0,1.0)) <(0.0,0.0),1.0>
2 \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N 2 \N \N \N \N \N \N \N \N

-- !select_all_arr_types --
1 [abc, def] [ghi, jkl] [2022-10-11, 2022-11-12] [1, 2] [2, 3] [3, 4] [2022-10-22 10:59:59.0, 2023-01-01 12:00:00.0] [34.123, 45.678] [12.123456, 34.56789] [10.16.10.14/32, 192.168.1.1/32] [10.16.10.14, 192.168.0.1] [ff:ff:ff:ff:ff:aa, 00:14:22:01:23:45] [{"id": 1}, {"id": 2}] [(1.0,1.0), (2.0,2.0)] [{1.0,1.0,1.0}, {2.0,2.0,2.0}] [[(1.0,1.0),(2.0,2.0)], [(3.0,3.0),(4.0,4.0)]] [(2.0,2.0),(1.0,1.0), (6.0,6.0),(5.0,5.0)] [((1.0,1.0),(2.0,2.0),(2.0,1.0)), ((3.0,3.0),(4.0,4.0),(5.0,5.0))] [((1.0,1.0),(2.0,2.0),(2.0,1.0)), ((3.0,3.0),(4.0,4.0),(5.0,5.0))] [<(0.0,0.0),1.0>, <(5.0,5.0),10.0>]

-- !select_all_arr2d_types --
1 [[abc, def], [ghi, jkl]] [[abc, def], [ghi, jkl]] [[2022-10-11, 2022-11-12], [2023-01-01, 2023-02-02]] [[1, 2], [3, 4]] [[1, 2], [2, 3]] [[3, 4], [4, 5]] [[2022-10-22 10:59:59.0, 2023-01-01 12:00:00.0], [2023-02-01 14:30:00.0, 2023-02-10 16:45:00.0]] [[34.123, 45.678], [56.789, 67.890]] [[12.123456, 34.56789], [56.789124, 45.678345]] [[10.16.10.14/32, 192.168.1.1/32], [172.16.0.1/32, 192.168.2.2/32]] [[10.16.10.14, 192.168.1.1], [172.16.0.1, 192.168.2.2]] [[ff:ff:ff:ff:ff:aa, 00:14:22:01:23:45], [11:22:33:44:55:66, 77:88:99:00:11:22]] [[{"id": 1}, {"id": 2}], [{"id": 3}, {"id": 4}]] [[(1.0,1.0), (2.0,2.0)], [(3.0,3.0), (4.0,4.0)]] [[{1.0,1.0,1.0}, {2.0,2.0,2.0}], [{3.0,3.0,3.0}, {4.0,4.0,4.0}]] [[[(1.0,1.0),(2.0,2.0)], [(3.0,3.0),(4.0,4.0)]], [[(5.0,5.0),(6.0,6.0)], [(7.0,7.0),(8.0,8.0)]]] [[(2.0,2.0),(1.0,1.0), (6.0,6.0),(5.0,5.0)], [(8.0,8.0),(7.0,7.0), (10.0,10.0),(9.0,9.0)]] [[((1.0,1.0),(2.0,2.0),(2.0,1.0)), ((3.0,3.0),(4.0,4.0),(5.0,5.0))], [((6.0,6.0),(7.0,7.0),(8.0,8.0)), ((9.0,9.0),(10.0,10.0),(11.0,11.0))]] [[((1.0,1.0),(2.0,2.0),(2.0,1.0)), ((3.0,3.0),(4.0,4.0),(5.0,5.0))], [((6.0,6.0),(7.0,7.0),(8.0,8.0)), ((9.0,9.0),(10.0,10.0),(11.0,11.0))]] [[<(0.0,0.0),1.0>, <(5.0,5.0),10.0>], [<(2.0,2.0),3.0>, <(7.0,7.0),14.0>]]

-- !select_all_arr_string_cast --
["abc", "def"]

-- !select_all_arr_int_cast --
[2, 3]

-- !ctas --
1 abc def 2022-10-11 1 2 3 2022-10-22T10:59:59 34.123 false 12.123456 10.16.10.14/32 10.16.10.14 ff:ff:ff:ff:ff:aa 1010101010 01010 1 {"id": 1} (1.0,1.0) {1.0,1.0,1.0} [(1.0,1.0),(2.0,2.0)] (2.0,2.0),(1.0,1.0) ((1.0,1.0),(2.0,2.0),(2.0,1.0)) ((1.0,1.0),(2.0,2.0),(2.0,1.0)) <(0.0,0.0),1.0>
2 \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N 2 \N \N \N \N \N \N \N \N
@@ -146,6 +146,20 @@ suite("test_pg_jdbc_catalog", "p0,external,pg,external_docker,external_docker_pg
// test select all types
order_qt_select_all_types """select * from ${test_all_types}; """

// test select all array types

sql """admin set frontend config("enable_postgresql_array_mapping_to_array_string" = "true")"""

order_qt_select_all_arr_types """select * except(bit_value,bitn_value,bitnv_value) from test_all_types_array order by 1;"""

order_qt_select_all_arr2d_types """select * except(bit_value,bitn_value,bitnv_value) from test_all_types_2d_array order by 1;"""

order_qt_select_all_arr_string_cast """select cast(char_value as array<string>) from test_all_types_array"""

order_qt_select_all_arr_int_cast """select cast(int_value as array<int>) from test_all_types_array"""

sql """admin set frontend config("enable_postgresql_array_mapping_to_array_string" = "false")"""

// test test ctas
sql """ drop table if exists internal.${internal_db_name}.${test_ctas} """
sql """ create table internal.${internal_db_name}.${test_ctas}
