Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -89,7 +89,10 @@ class PostgresIntegrationSuite extends DockerJDBCIntegrationSuite {
.executeUpdate()

conn.prepareStatement("CREATE TABLE st_with_array (c0 uuid, c1 inet, c2 cidr," +
"c3 json, c4 jsonb, c5 uuid[], c6 inet[], c7 cidr[], c8 json[], c9 jsonb[])")
"c3 json, c4 jsonb, c5 uuid[], c6 inet[], c7 cidr[], c8 json[], c9 jsonb[], c10 xml[], " +
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

How about adding tests for non-array cases, e.g., col xml and col tsvector?

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I found that many types have no test coverage so far. So I'll open another PR for the non-array cases and focus on the array cases in this PR.

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Does that mean there are types that aren't supported, but for which we support arrays? Did I misunderstand that?

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I mean that some supported non-array types — e.g., numeric, varchar, date, etc. — are not tested.
So we need to add tests for those types in addition to the types supported by this PR.

"c11 tsvector[], c12 tsquery[], c13 macaddr[], c14 txid_snapshot[], c15 point[], " +
"c16 line[], c17 lseg[], c18 box[], c19 path[], c20 polygon[], c21 circle[], c22 pg_lsn[], " +
"c23 bit varying(6)[], c24 interval[], c25 macaddr8[], c26 pg_snapshot[])")
.executeUpdate()
conn.prepareStatement("INSERT INTO st_with_array VALUES ( " +
"'0a532531-cdf1-45e3-963d-5de90b6a30f1', '172.168.22.1', '192.168.100.128/25', " +
Expand All @@ -98,7 +101,24 @@ class PostgresIntegrationSuite extends DockerJDBCIntegrationSuite {
"'205f9bfc-018c-4452-a605-609c0cfad228']::uuid[], ARRAY['172.16.0.41', " +
"'172.16.0.42']::inet[], ARRAY['192.168.0.0/24', '10.1.0.0/16']::cidr[], " +
"""ARRAY['{"a": "foo", "b": "bar"}', '{"a": 1, "b": 2}']::json[], """ +
"""ARRAY['{"a": 1, "b": 2, "c": 3}']::jsonb[])"""
"""ARRAY['{"a": 1, "b": 2, "c": 3}']::jsonb[], """ +
"""ARRAY['<key>id</key><value>10</value>']::xml[], ARRAY['The dog laying on the grass', """ +
"""'the:1 cat:2 is:3 on:4 the:5 table:6']::tsvector[], """ +
"""ARRAY['programming & language & ! interpreter', 'cat:AB & dog:CD']::tsquery[], """ +
"""ARRAY['12:34:56:78:90:ab', 'cd-ef-12-34-56-78']::macaddr[], """ +
"""ARRAY['10:20:10,14,15']::txid_snapshot[], """ +
"""ARRAY['(800, 600)', '83.24, 5.10']::point[], """ +
"""ARRAY['(23.8, 56.2), (16.23, 89.2)', '{23.85, 10.87, 5.92}']::line[], """ +
"""ARRAY['[(80.12, 131.24), (201.5, 503.33)]']::lseg[], """ +
"""ARRAY['(19.84, 11.23), (20.21, 2.1)']::box[], """ +
"""ARRAY['(10.2, 30.4), (50.6, 70.8), (90.1, 11.3)']::path[], """ +
"""ARRAY['((100.3, 40.2), (20.198, 83.1), (500.821, 311.38))']::polygon[], """ +
"""ARRAY['<500, 200, 100>']::circle[], """ +
"""ARRAY['16/B374D848']::pg_lsn[], """ +
"""ARRAY[B'101010']::bit varying(6)[], """ +
"""ARRAY['1 day', '2 minutes']::interval[], """ +
"""ARRAY['08:00:2b:01:02:03:04:05']::macaddr8[], """ +
"""ARRAY['10:20:10,14,15']::pg_snapshot[])"""
).executeUpdate()

conn.prepareStatement("CREATE TABLE char_types (" +
Expand Down Expand Up @@ -262,6 +282,27 @@ class PostgresIntegrationSuite extends DockerJDBCIntegrationSuite {
assert(rows(0).getSeq(7) == Seq("192.168.0.0/24", "10.1.0.0/16"))
assert(rows(0).getSeq(8) == Seq("""{"a": "foo", "b": "bar"}""", """{"a": 1, "b": 2}"""))
assert(rows(0).getSeq(9) == Seq("""{"a": 1, "b": 2, "c": 3}"""))
assert(rows(0).getSeq(10) == Seq("""<key>id</key><value>10</value>"""))
assert(rows(0).getSeq(11) == Seq("'The' 'dog' 'grass' 'laying' 'on' 'the'",
"'cat':2 'is':3 'on':4 'table':6 'the':1,5"))
assert(rows(0).getSeq(12) == Seq("'programming' & 'language' & !'interpreter'",
"'cat':AB & 'dog':CD"))
assert(rows(0).getSeq(13) == Seq("12:34:56:78:90:ab", "cd:ef:12:34:56:78"))
assert(rows(0).getSeq(14) == Seq("10:20:10,14,15"))
assert(rows(0).getSeq(15) == Seq("(800.0,600.0)", "(83.24,5.1)"))
assert(rows(0).getSeq(16) == Seq("{-4.359313077939234,-1.0,159.9516512549538}",
"{23.85,10.87,5.92}"))
assert(rows(0).getSeq(17) == Seq("[(80.12,131.24),(201.5,503.33)]"))
assert(rows(0).getSeq(18) == Seq("(20.21,11.23),(19.84,2.1)"))
assert(rows(0).getSeq(19) == Seq("((10.2,30.4),(50.6,70.8),(90.1,11.3))"))
assert(rows(0).getSeq(20) == Seq("((100.3,40.2),(20.198,83.1),(500.821,311.38))"))
assert(rows(0).getSeq(21) == Seq("<(500.0,200.0),100.0>"))
assert(rows(0).getSeq(22) == Seq("16/B374D848"))
assert(rows(0).getSeq(23) == Seq("101010"))
assert(rows(0).getSeq(24) == Seq("0 years 0 mons 1 days 0 hours 0 mins 0.00 secs",
"0 years 0 mons 0 days 0 hours 2 mins 0.00 secs"))
assert(rows(0).getSeq(25) == Seq("08:00:2b:01:02:03:04:05"))
assert(rows(0).getSeq(26) == Seq("10:20:10,14,15"))
}

test("query JDBC option") {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,10 @@ private object PostgresDialect extends JdbcDialect {
case "int8" | "oid" => Some(LongType)
case "float4" => Some(FloatType)
case "money" | "float8" => Some(DoubleType)
case "text" | "varchar" | "char" | "bpchar" | "cidr" | "inet" | "json" | "jsonb" | "uuid" =>
case "text" | "varchar" | "char" | "bpchar" | "cidr" | "inet" | "json" | "jsonb" | "uuid" |
"xml" | "tsvector" | "tsquery" | "macaddr" | "macaddr8" | "txid_snapshot" | "point" |
"line" | "lseg" | "box" | "path" | "polygon" | "circle" | "pg_lsn" | "varbit" |
"interval" | "pg_snapshot" =>
Some(StringType)
case "bytea" => Some(BinaryType)
case "timestamp" | "timestamptz" | "time" | "timetz" => Some(TimestampType)
Expand Down