fix: add full_match_handler.go to convert the query. #299

Merged 27 commits on Dec 31, 2024
Changes from 23 commits
Commits (27)
af3deb4
to #297 fix: add full_match_handler.go to convert the query.
TianyuZhang1214 Dec 19, 2024
c721fe2
Merge branch 'main' into 297-ambiguous-reference
TianyuZhang1214 Dec 19, 2024
b6d19f4
to #297 test: enable pg test for csharp
TianyuZhang1214 Dec 19, 2024
5f51a7b
Merge remote-tracking branch 'origin/297-ambiguous-reference' into 29…
TianyuZhang1214 Dec 19, 2024
aabe13b
to #297 fix: bugs in ConvertToSys()
TianyuZhang1214 Dec 23, 2024
f664aa1
Merge branch 'main' into 297-ambiguous-reference
TianyuZhang1214 Dec 23, 2024
f48b083
to #297 fix: mock pg_type, pg_namespace, pg_class, pg_proc and pg_ran…
TianyuZhang1214 Dec 24, 2024
7fd7bc3
to #316 fix: add HasSentRowDesc to make sure we only send the Row Des…
TianyuZhang1214 Dec 25, 2024
7cb3cca
to #316 fix: move initial data into .csv file.
TianyuZhang1214 Dec 25, 2024
0c07f92
Merge branch 'main' into 297-ambiguous-reference
TianyuZhang1214 Dec 25, 2024
5a14397
to #316 merge main
TianyuZhang1214 Dec 25, 2024
e014bb9
to #297 fix: downgrade .net version to 8.0
TianyuZhang1214 Dec 25, 2024
060d492
to #297 fix: add `initial-data-dir`.
TianyuZhang1214 Dec 25, 2024
a938146
to #297 fix: add `initial-data-dir`.
TianyuZhang1214 Dec 25, 2024
afc9c25
to #297 adopt CR
TianyuZhang1214 Dec 25, 2024
782a41e
Merge branch 'main' into 297-ambiguous-reference
TianyuZhang1214 Dec 26, 2024
3a6511d
to #297 feat: using go::embed to read files under directory.
TianyuZhang1214 Dec 26, 2024
1e08c33
Merge branch 'main' into 297-ambiguous-reference
TianyuZhang1214 Dec 26, 2024
c58d330
adopt PR
TianyuZhang1214 Dec 26, 2024
216d65d
Merge branch 'main' into 297-ambiguous-reference
TianyuZhang1214 Dec 26, 2024
1e30ec0
Merge branch 'main' into 297-ambiguous-reference
TianyuZhang1214 Dec 27, 2024
2369f13
adopt PR
TianyuZhang1214 Dec 27, 2024
2704cda
Merge remote-tracking branch 'origin/297-ambiguous-reference' into 29…
TianyuZhang1214 Dec 27, 2024
a5a91b3
Merge branch 'main' into 297-ambiguous-reference
TianyuZhang1214 Dec 30, 2024
fe0cc4b
adopt CR
TianyuZhang1214 Dec 30, 2024
4f32be5
Merge branch 'main' into 297-ambiguous-reference
TianyuZhang1214 Dec 30, 2024
30af75e
Merge branch 'main' into 297-ambiguous-reference
TianyuZhang1214 Dec 31, 2024
22 changes: 22 additions & 0 deletions catalog/initial_data.go
@@ -0,0 +1,22 @@
package catalog

var InitialDataTables = struct {
	PGNamespace [][]any
	PGRange     [][]any
}{
	PGNamespace: [][]any{
		{"99", "pg_toast", "10", ""},
		{"11", "pg_catalog", "10", "{postgres=UC/postgres,=U/postgres}"},
		{"2200", "public", "6171", "{pg_database_owner,=UC/pg_database_owner,=U/pg_database_owner}"},
		{"13219", "information_schema", "10", "{postgres=UC/postgres,=U/postgres}"},
		{"16395", "test_schema", "10", ""},
	},
	PGRange: [][]any{
		{"3904", "23", "4451", "0", "1978", "int4range_canonical", "int4range_subdiff"},
		{"3906", "1700", "4532", "0", "3125", "-", "numrange_subdiff"},
		{"3908", "1114", "4533", "0", "3128", "-", "tsrange_subdiff"},
		{"3910", "1184", "4534", "0", "3127", "-", "tstzrange_subdiff"},
		{"3912", "1082", "4535", "0", "3122", "daterange_canonical", "daterange_subdiff"},
		{"3926", "20", "4536", "0", "3124", "int8range_canonical", "int8range_subdiff"},
	},
}
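
The rows above appear to follow PostgreSQL's own catalog column order: pg_namespace's (oid, nspname, nspowner, nspacl) and pg_range's (rngtypid, rngsubtype, rngmultitypid, rngcollation, rngsubopc, rngcanonical, rngsubdiff), and the built-in schema OIDs (11 for pg_catalog, 2200 for public) match stock PostgreSQL. Below is a minimal sketch of reading one PGNamespace row by position; the struct and function are illustrative, not part of the PR.

package main

import "fmt"

// pgNamespaceRow names the four positional values each PGNamespace entry
// above carries, assuming PostgreSQL's pg_namespace column order.
type pgNamespaceRow struct {
	OID, Nspname, Nspowner, Nspacl string
}

func main() {
	// Second row of InitialDataTables.PGNamespace above.
	raw := []any{"11", "pg_catalog", "10", "{postgres=UC/postgres,=U/postgres}"}
	row := pgNamespaceRow{
		OID:      raw[0].(string),
		Nspname:  raw[1].(string),
		Nspowner: raw[2].(string),
		Nspacl:   raw[3].(string),
	}
	fmt.Printf("schema %s is owned by role OID %s\n", row.Nspname, row.Nspowner)
}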
339 changes: 331 additions & 8 deletions catalog/internal_tables.go

Large diffs are not rendered by default.
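
Although this diff is not shown, provider.go below calls t.Name, t.QualifiedName(), and t.CountAllStmt() on each internal table, so the type exposes at least these. A hedged sketch of that shape; the fields and method bodies are guesses, not the actual 339-line change.

package catalog

import "fmt"

// InternalTable sketch: only the members used by provider.go are shown.
type InternalTable struct {
	Schema string
	Name   string
}

// QualifiedName returns the schema-qualified table name.
func (t InternalTable) QualifiedName() string {
	return t.Schema + "." + t.Name
}

// CountAllStmt returns a statement that counts the rows in the table.
func (t InternalTable) CountAllStmt() string {
	return fmt.Sprintf("SELECT COUNT(*) FROM %s", t.QualifiedName())
}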

38 changes: 38 additions & 0 deletions catalog/provider.go
@@ -18,6 +18,7 @@ import (

"github.com/apecloud/myduckserver/adapter"
"github.com/apecloud/myduckserver/configuration"
"github.com/apecloud/myduckserver/initialdata"
)

type DatabaseProvider struct {
@@ -143,6 +144,43 @@ func (prov *DatabaseProvider) initCatalog() error {
				return fmt.Errorf("failed to insert initial data into internal table %q: %w", t.Name, err)
			}
		}

		initialFileContent := initialdata.InitialTableDataMap[t.Name]
		if initialFileContent != "" {
			var count int
			// Count rows in the internal table
			if err := prov.storage.QueryRow(t.CountAllStmt()).Scan(&count); err != nil {
				return fmt.Errorf("failed to count rows in internal table %q: %w", t.Name, err)
			}

			if count == 0 {
				// Create temporary file to store initial data
				tmpFile, err := os.CreateTemp("", "initial-data-"+t.Name+".csv")
				if err != nil {
					return fmt.Errorf("failed to create temporary file for initial data: %w", err)
				}
				// Ensure the temporary file is removed after usage
				defer os.Remove(tmpFile.Name())
				defer tmpFile.Close()

				// Write the initial data to the temporary file
				if _, err := tmpFile.WriteString(initialFileContent); err != nil {
					return fmt.Errorf("failed to write initial data to temporary file: %w", err)
				}

				if err = tmpFile.Sync(); err != nil {
					return fmt.Errorf("failed to sync initial data file: %w", err)
				}

				// Execute the COPY command to insert data into the table
				if _, err := prov.storage.ExecContext(
					context.Background(),
					fmt.Sprintf("COPY %s FROM '%s' (DELIMITER ',', HEADER)", t.QualifiedName(), tmpFile.Name()),
				); err != nil {
					return fmt.Errorf("failed to insert initial data from file into internal table %q: %w", t.Name, err)
				}
			}
		}
	}

if _, err := prov.pool.ExecContext(context.Background(), "PRAGMA enable_checkpoint_on_shutdown"); err != nil {
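
For reference, here is a minimal standalone sketch of the temp-CSV-plus-COPY pattern that initCatalog now uses, assuming any database/sql handle backed by DuckDB. The helper name seedFromCSV and its signature are illustrative and not part of the PR.

package example

import (
	"database/sql"
	"fmt"
	"os"
)

// seedFromCSV writes embedded CSV content to a temporary file and bulk-loads
// it with DuckDB's COPY ... FROM statement, mirroring the logic added to
// DatabaseProvider.initCatalog. Callers are expected to check the row count
// first so existing data is never overwritten.
func seedFromCSV(db *sql.DB, qualifiedTable, csvContent string) error {
	tmp, err := os.CreateTemp("", "initial-data-*.csv")
	if err != nil {
		return fmt.Errorf("create temp file: %w", err)
	}
	defer os.Remove(tmp.Name()) // clean up the file once the load is done
	defer tmp.Close()

	if _, err := tmp.WriteString(csvContent); err != nil {
		return fmt.Errorf("write csv: %w", err)
	}
	if err := tmp.Sync(); err != nil {
		return fmt.Errorf("flush csv to disk: %w", err)
	}

	// DuckDB reads the file straight from disk; HEADER skips the first line.
	_, err = db.Exec(fmt.Sprintf("COPY %s FROM '%s' (DELIMITER ',', HEADER)",
		qualifiedTable, tmp.Name()))
	return err
}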
2 changes: 1 addition & 1 deletion compatibility/pg/csharp/PGTest.cs
@@ -14,7 +14,7 @@ public class Tests

public void Connect(string ip, int port, string user, string password)
{
-string connectionString = $"Host={ip};Port={port};Username={user};Password={password};Database=postgres;";
+string connectionString = $"Host={ip};Port={port};Username={user};Password={password};Database=postgres;Timeout=300;CommandTimeout=600;";
try
{
conn = new NpgsqlConnection(connectionString);
12 changes: 5 additions & 7 deletions compatibility/pg/test.bats
@@ -41,13 +41,11 @@ start_process() {
start_process $BATS_TEST_DIRNAME/c/pg_test 127.0.0.1 5432 postgres "" $BATS_TEST_DIRNAME/test.data
}

-# Failed because of the following error:
-# > Catalog Error: Table with name pg_range does not exist!
-# @test "pg-csharp" {
-# set_custom_teardown "sudo pkill -f dotnet"
-# start_process dotnet build $BATS_TEST_DIRNAME/csharp/PGTest.csproj -o $BATS_TEST_DIRNAME/csharp/bin
-# start_process dotnet $BATS_TEST_DIRNAME/csharp/bin/PGTest.dll 127.0.0.1 5432 postgres "" $BATS_TEST_DIRNAME/test.data
-# }
+@test "pg-csharp" {
+set_custom_teardown "sudo pkill -f dotnet"
+start_process dotnet build $BATS_TEST_DIRNAME/csharp/PGTest.csproj -o $BATS_TEST_DIRNAME/csharp/bin
+start_process dotnet $BATS_TEST_DIRNAME/csharp/bin/PGTest.dll 127.0.0.1 5432 postgres "" $BATS_TEST_DIRNAME/test.data
+}

@test "pg-go" {
start_process go build -o $BATS_TEST_DIRNAME/go/pg $BATS_TEST_DIRNAME/go/pg.go
18 changes: 18 additions & 0 deletions initialdata/file_content.go
@@ -0,0 +1,18 @@
package initialdata

import _ "embed"

//go:embed pg_class.csv
var pgClassContent string

//go:embed pg_proc.csv
var pgProcContent string

//go:embed pg_type.csv
var pgTypeContent string

var InitialTableDataMap = map[string]string{
	"pg_class": pgClassContent,
	"pg_proc":  pgProcContent,
	"pg_type":  pgTypeContent,
}
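
The message of commit 3a6511d mentions using go:embed to read every file under a directory, while this snapshot embeds the three CSVs individually. Below is a hedged sketch of what the directory-level variant could look like with an embed.FS; the identifiers are illustrative, not taken from the repository.

package initialdata

import (
	"embed"
	"io/fs"
	"path"
	"strings"
)

// Embed every CSV in this package directory at build time.
//
//go:embed *.csv
var initialDataFS embed.FS

// loadInitialData builds a table-name to CSV-content map by walking the
// embedded filesystem, e.g. "pg_class.csv" becomes the "pg_class" key.
func loadInitialData() (map[string]string, error) {
	m := make(map[string]string)
	err := fs.WalkDir(initialDataFS, ".", func(p string, d fs.DirEntry, err error) error {
		if err != nil || d.IsDir() {
			return err
		}
		data, readErr := initialDataFS.ReadFile(p)
		if readErr != nil {
			return readErr
		}
		m[strings.TrimSuffix(path.Base(p), ".csv")] = string(data)
		return nil
	})
	return m, err
}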