From 8eec17652f0db11750fe3934f981744bc4c1db34 Mon Sep 17 00:00:00 2001
From: Lisa Best
Date: Tue, 15 Oct 2024 10:37:30 +1030
Subject: [PATCH 1/3] Address items raised in issue 237

- Remove the V13 migration
- Replace it with a migration script that adds update_time with a default of
  'epoch' if the column doesn't exist, and sets the default to 'epoch' if the
  column does exist
- Fix PostgresIndexDAO.indexWorkflow to also set the update_time during an
  UPDATE
- Make the backfill script for update_time separate and optional, and fix the
  timestamp format to include milliseconds
---
 .../config/PostgresConfiguration.java         | 16 +++--
 .../postgres/config/PostgresProperties.java   | 11 ++++
 .../postgres/dao/PostgresIndexDAO.java        |  3 +-
 .../V13.1__workflow_index_columns.sql         |  6 ++
 .../V13__workflow_index_columns.sql           |  8 ---
 ...2__workflow_index_backfill_update_time.sql |  4 ++
 ...ostgresConfigurationDataMigrationTest.java | 62 +++++++++++++++++++
 7 files changed, 96 insertions(+), 14 deletions(-)
 create mode 100644 postgres-persistence/src/main/resources/db/migration_postgres/V13.1__workflow_index_columns.sql
 delete mode 100644 postgres-persistence/src/main/resources/db/migration_postgres/V13__workflow_index_columns.sql
 create mode 100644 postgres-persistence/src/main/resources/db/migration_postgres_data/V13.2__workflow_index_backfill_update_time.sql
 create mode 100644 postgres-persistence/src/test/java/com/netflix/conductor/postgres/config/PostgresConfigurationDataMigrationTest.java

diff --git a/postgres-persistence/src/main/java/com/netflix/conductor/postgres/config/PostgresConfiguration.java b/postgres-persistence/src/main/java/com/netflix/conductor/postgres/config/PostgresConfiguration.java
index 1e00cb067..e1165ca55 100644
--- a/postgres-persistence/src/main/java/com/netflix/conductor/postgres/config/PostgresConfiguration.java
+++ b/postgres-persistence/src/main/java/com/netflix/conductor/postgres/config/PostgresConfiguration.java
@@ -13,6 +13,7 @@
 package com.netflix.conductor.postgres.config;
 
 import java.sql.SQLException;
+import java.util.ArrayList;
 import java.util.Map;
 import java.util.Optional;
 
@@ -25,7 +26,6 @@
 import org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration;
 import org.springframework.boot.context.properties.EnableConfigurationProperties;
 import org.springframework.context.annotation.*;
-import org.springframework.core.env.*;
 import org.springframework.retry.RetryContext;
 import org.springframework.retry.backoff.NoBackOffPolicy;
 import org.springframework.retry.policy.SimpleRetryPolicy;
@@ -58,13 +58,19 @@ public PostgresConfiguration(DataSource dataSource, PostgresProperties propertie
     public Flyway flywayForPrimaryDb() {
         FluentConfiguration config = Flyway.configure();
 
+        var locations = new ArrayList<String>();
+        locations.add("classpath:db/migration_postgres");
+
         if (properties.getExperimentalQueueNotify()) {
-            config.locations(
-                    "classpath:db/migration_postgres", "classpath:db/migration_postgres_notify");
-        } else {
-            config.locations("classpath:db/migration_postgres");
+            locations.add("classpath:db/migration_postgres_notify");
+        }
+
+        if (properties.isApplyDataMigrations()) {
+            locations.add("classpath:db/migration_postgres_data");
         }
 
+        config.locations(locations.toArray(new String[0]));
+
         return config.configuration(Map.of("flyway.postgresql.transactional.lock", "false"))
                 .schemas(properties.getSchema())
                 .dataSource(dataSource)
diff --git a/postgres-persistence/src/main/java/com/netflix/conductor/postgres/config/PostgresProperties.java b/postgres-persistence/src/main/java/com/netflix/conductor/postgres/config/PostgresProperties.java
index 0ddf80098..9c650e694 100644
--- a/postgres-persistence/src/main/java/com/netflix/conductor/postgres/config/PostgresProperties.java
+++ b/postgres-persistence/src/main/java/com/netflix/conductor/postgres/config/PostgresProperties.java
@@ -39,6 +39,9 @@ public class PostgresProperties {
 
     private boolean onlyIndexOnStatusChange = false;
 
+    /** The boolean indicating whether data migrations should be executed */
+    private boolean applyDataMigrations = true;
+
     public String schema = "public";
 
     public boolean allowFullTextQueries = true;
@@ -83,6 +86,14 @@ public void setOnlyIndexOnStatusChange(boolean onlyIndexOnStatusChange) {
         this.onlyIndexOnStatusChange = onlyIndexOnStatusChange;
     }
 
+    public boolean isApplyDataMigrations() {
+        return applyDataMigrations;
+    }
+
+    public void setApplyDataMigrations(boolean applyDataMigrations) {
+        this.applyDataMigrations = applyDataMigrations;
+    }
+
     public Integer getDeadlockRetryMax() {
         return deadlockRetryMax;
     }
diff --git a/postgres-persistence/src/main/java/com/netflix/conductor/postgres/dao/PostgresIndexDAO.java b/postgres-persistence/src/main/java/com/netflix/conductor/postgres/dao/PostgresIndexDAO.java
index b0481a293..b50756cca 100644
--- a/postgres-persistence/src/main/java/com/netflix/conductor/postgres/dao/PostgresIndexDAO.java
+++ b/postgres-persistence/src/main/java/com/netflix/conductor/postgres/dao/PostgresIndexDAO.java
@@ -85,7 +85,8 @@ public void indexWorkflow(WorkflowSummary workflow) {
                 "INSERT INTO workflow_index (workflow_id, correlation_id, workflow_type, start_time, update_time, status, json_data)"
                         + "VALUES (?, ?, ?, ?, ?, ?, ?::JSONB) ON CONFLICT (workflow_id) \n"
                         + "DO UPDATE SET correlation_id = EXCLUDED.correlation_id, workflow_type = EXCLUDED.workflow_type, "
-                        + "start_time = EXCLUDED.start_time, status = EXCLUDED.status, json_data = EXCLUDED.json_data "
+                        + "start_time = EXCLUDED.start_time, status = EXCLUDED.status, json_data = EXCLUDED.json_data, "
+                        + "update_time = EXCLUDED.update_time "
                         + "WHERE EXCLUDED.update_time >= workflow_index.update_time";
 
         if (onlyIndexOnStatusChange) {
diff --git a/postgres-persistence/src/main/resources/db/migration_postgres/V13.1__workflow_index_columns.sql b/postgres-persistence/src/main/resources/db/migration_postgres/V13.1__workflow_index_columns.sql
new file mode 100644
index 000000000..f36334d37
--- /dev/null
+++ b/postgres-persistence/src/main/resources/db/migration_postgres/V13.1__workflow_index_columns.sql
@@ -0,0 +1,6 @@
+ALTER TABLE workflow_index
+    ADD COLUMN IF NOT EXISTS update_time TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT TIMESTAMP WITH TIME ZONE 'epoch';
+
+-- SET DEFAULT AGAIN IN CASE COLUMN ALREADY EXISTED from deleted V13 migration
+ALTER TABLE workflow_index
+    ALTER COLUMN update_time SET DEFAULT TIMESTAMP WITH TIME ZONE 'epoch';
diff --git a/postgres-persistence/src/main/resources/db/migration_postgres/V13__workflow_index_columns.sql b/postgres-persistence/src/main/resources/db/migration_postgres/V13__workflow_index_columns.sql
deleted file mode 100644
index c57c03f4e..000000000
--- a/postgres-persistence/src/main/resources/db/migration_postgres/V13__workflow_index_columns.sql
+++ /dev/null
@@ -1,8 +0,0 @@
-ALTER TABLE workflow_index
-ADD update_time TIMESTAMP WITH TIME ZONE NULL;
-
-UPDATE workflow_index
-SET update_time = to_timestamp(json_data->>'updateTime', 'YYYY-MM-DDTHH24:MI:SSZ')::timestamp WITH time zone;
-
-ALTER TABLE workflow_index
-ALTER COLUMN update_time SET NOT NULL;
\ No newline at end of file
diff --git a/postgres-persistence/src/main/resources/db/migration_postgres_data/V13.2__workflow_index_backfill_update_time.sql b/postgres-persistence/src/main/resources/db/migration_postgres_data/V13.2__workflow_index_backfill_update_time.sql
new file mode 100644
index 000000000..f63e6d6a7
--- /dev/null
+++ b/postgres-persistence/src/main/resources/db/migration_postgres_data/V13.2__workflow_index_backfill_update_time.sql
@@ -0,0 +1,4 @@
+-- Optional back-fill script to populate updateTime historically.
+UPDATE workflow_index
+SET update_time = to_timestamp(json_data->>'updateTime', 'YYYY-MM-DDTHH24:MI:SS.MSZ')::timestamp WITH time zone
+WHERE json_data->>'updateTime' IS NOT NULL;
\ No newline at end of file
diff --git a/postgres-persistence/src/test/java/com/netflix/conductor/postgres/config/PostgresConfigurationDataMigrationTest.java b/postgres-persistence/src/test/java/com/netflix/conductor/postgres/config/PostgresConfigurationDataMigrationTest.java
new file mode 100644
index 000000000..e45525ade
--- /dev/null
+++ b/postgres-persistence/src/test/java/com/netflix/conductor/postgres/config/PostgresConfigurationDataMigrationTest.java
@@ -0,0 +1,62 @@
+package com.netflix.conductor.postgres.config;
+
+import com.netflix.conductor.common.config.TestObjectMapperConfiguration;
+import org.flywaydb.core.Flyway;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.autoconfigure.flyway.FlywayAutoConfiguration;
+import org.springframework.boot.test.context.SpringBootTest;
+import org.springframework.core.io.Resource;
+import org.springframework.core.io.support.ResourcePatternResolver;
+import org.springframework.test.context.ContextConfiguration;
+import org.springframework.test.context.TestPropertySource;
+import org.springframework.test.context.junit4.SpringRunner;
+
+import java.util.Arrays;
+import java.util.Objects;
+
+import static org.junit.Assert.assertTrue;
+
+@ContextConfiguration(
+        classes = {
+                TestObjectMapperConfiguration.class,
+                PostgresConfiguration.class,
+                FlywayAutoConfiguration.class
+        })
+@RunWith(SpringRunner.class)
+@TestPropertySource(
+        properties = {
+                "conductor.app.asyncIndexingEnabled=false",
+                "conductor.elasticsearch.version=0",
+                "conductor.indexing.type=postgres",
+                "conductor.postgres.applyDataMigrations=false",
+                "spring.flyway.clean-disabled=false"
+        })
+@SpringBootTest
+public class PostgresConfigurationDataMigrationTest {
+
+    @Autowired
+    Flyway flyway;
+
+    @Autowired
+    ResourcePatternResolver resourcePatternResolver;
+
+    // clean the database between tests.
+    @Before
+    public void before() {
+        flyway.migrate();
+    }
+
+    @Test
+    public void dataMigrationIsNotAppliedWhenDisabled() throws Exception {
+        var files = resourcePatternResolver.getResources("classpath:db/migration_postgres_data/*");
+        Arrays.stream(flyway.info().applied()).forEach(migrationInfo ->
+                assertTrue("Data migration wrongly applied: " + migrationInfo.getScript(),
+                        Arrays.stream(files)
+                                .map(Resource::getFilename)
+                                .filter(Objects::nonNull)
+                                .noneMatch(fileName -> fileName.contains(migrationInfo.getScript()))));
+    }
+}
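A note on rolling this patch out: the new flag defaults to true, so existing installations keep applying the optional backfill under db/migration_postgres_data unless they opt out. A minimal configuration sketch for skipping it, using the same property key the test above exercises (shown as an application.properties entry; adjust for YAML or environment variables as needed):

    conductor.postgres.applyDataMigrations=false

Turning the flag back on simply adds the classpath:db/migration_postgres_data location to Flyway again, so the V13.2 script becomes visible on a later startup.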
From 27a93555b196ed2ab8b162cf53a74d24ff076e61 Mon Sep 17 00:00:00 2001
From: Lisa Best
Date: Fri, 18 Oct 2024 09:01:03 +1030
Subject: [PATCH 2/3] Fix spotless errors

---
 ...ostgresConfigurationDataMigrationTest.java | 61 ++++++++++++-------
 1 file changed, 40 insertions(+), 21 deletions(-)

diff --git a/postgres-persistence/src/test/java/com/netflix/conductor/postgres/config/PostgresConfigurationDataMigrationTest.java b/postgres-persistence/src/test/java/com/netflix/conductor/postgres/config/PostgresConfigurationDataMigrationTest.java
index e45525ade..0f5167fd6 100644
--- a/postgres-persistence/src/test/java/com/netflix/conductor/postgres/config/PostgresConfigurationDataMigrationTest.java
+++ b/postgres-persistence/src/test/java/com/netflix/conductor/postgres/config/PostgresConfigurationDataMigrationTest.java
@@ -1,6 +1,20 @@
+/*
+ * Copyright 2024 Conductor Authors.
+ * <p>
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
+ */
 package com.netflix.conductor.postgres.config;
 
-import com.netflix.conductor.common.config.TestObjectMapperConfiguration;
+import java.util.Arrays;
+import java.util.Objects;
+
 import org.flywaydb.core.Flyway;
 import org.junit.Before;
 import org.junit.Test;
 import org.junit.runner.RunWith;
@@ -14,34 +28,31 @@
 import org.springframework.test.context.TestPropertySource;
 import org.springframework.test.context.junit4.SpringRunner;
 
-import java.util.Arrays;
-import java.util.Objects;
+import com.netflix.conductor.common.config.TestObjectMapperConfiguration;
 
 import static org.junit.Assert.assertTrue;
 
 @ContextConfiguration(
         classes = {
-                TestObjectMapperConfiguration.class,
-                PostgresConfiguration.class,
-                FlywayAutoConfiguration.class
+            TestObjectMapperConfiguration.class,
+            PostgresConfiguration.class,
+            FlywayAutoConfiguration.class
         })
 @RunWith(SpringRunner.class)
 @TestPropertySource(
         properties = {
-                "conductor.app.asyncIndexingEnabled=false",
-                "conductor.elasticsearch.version=0",
-                "conductor.indexing.type=postgres",
-                "conductor.postgres.applyDataMigrations=false",
-                "spring.flyway.clean-disabled=false"
+            "conductor.app.asyncIndexingEnabled=false",
+            "conductor.elasticsearch.version=0",
+            "conductor.indexing.type=postgres",
+            "conductor.postgres.applyDataMigrations=false",
+            "spring.flyway.clean-disabled=false"
         })
 @SpringBootTest
 public class PostgresConfigurationDataMigrationTest {
 
-    @Autowired
-    Flyway flyway;
+    @Autowired Flyway flyway;
 
-    @Autowired
-    ResourcePatternResolver resourcePatternResolver;
+    @Autowired ResourcePatternResolver resourcePatternResolver;
 
     // clean the database between tests.
     @Before
@@ -52,11 +63,19 @@ public void before() {
     @Test
     public void dataMigrationIsNotAppliedWhenDisabled() throws Exception {
         var files = resourcePatternResolver.getResources("classpath:db/migration_postgres_data/*");
-        Arrays.stream(flyway.info().applied()).forEach(migrationInfo ->
-                assertTrue("Data migration wrongly applied: " + migrationInfo.getScript(),
-                        Arrays.stream(files)
-                                .map(Resource::getFilename)
-                                .filter(Objects::nonNull)
-                                .noneMatch(fileName -> fileName.contains(migrationInfo.getScript()))));
+        Arrays.stream(flyway.info().applied())
+                .forEach(
+                        migrationInfo ->
+                                assertTrue(
+                                        "Data migration wrongly applied: "
+                                                + migrationInfo.getScript(),
+                                        Arrays.stream(files)
+                                                .map(Resource::getFilename)
+                                                .filter(Objects::nonNull)
+                                                .noneMatch(
+                                                        fileName ->
+                                                                fileName.contains(
+                                                                        migrationInfo
+                                                                                .getScript())))));
     }
 }

From ca60fcadf89d0814e47cb40e9cda588e6b35b9e2 Mon Sep 17 00:00:00 2001
From: Lisa Best
Date: Fri, 18 Oct 2024 11:59:37 +1030
Subject: [PATCH 3/3] Amend the backfill script: further testing detected that
 it was not handling hours correctly, nor interpreting the trailing 'Z'
 timezone designator as UTC.

---
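Notes: two things change in the to_timestamp() call. The "T" separator is now double-quoted so the template parser treats it strictly as literal text (per the commit message, the unquoted form was throwing off the hour field), and the trailing Z is dropped from the template; instead the parsed value is cast to a plain timestamp and converted with AT TIME ZONE '00:00', so it is pinned to UTC rather than the session time zone. A quick sanity check that can be run in psql; the sample literal below is only an assumed example of the JSON updateTime format, not real data:

    SELECT to_timestamp('2024-10-18T09:01:03.123Z', 'YYYY-MM-DD"T"HH24:MI:SS.MS')::timestamp
               AT TIME ZONE '00:00' AS parsed_update_time;

If the expression behaves as intended, the result is the corresponding UTC instant with the milliseconds preserved.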
 .../V13.2__workflow_index_backfill_update_time.sql | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/postgres-persistence/src/main/resources/db/migration_postgres_data/V13.2__workflow_index_backfill_update_time.sql b/postgres-persistence/src/main/resources/db/migration_postgres_data/V13.2__workflow_index_backfill_update_time.sql
index f63e6d6a7..2ffbec396 100644
--- a/postgres-persistence/src/main/resources/db/migration_postgres_data/V13.2__workflow_index_backfill_update_time.sql
+++ b/postgres-persistence/src/main/resources/db/migration_postgres_data/V13.2__workflow_index_backfill_update_time.sql
@@ -1,4 +1,4 @@
 -- Optional back-fill script to populate updateTime historically.
 UPDATE workflow_index
-SET update_time = to_timestamp(json_data->>'updateTime', 'YYYY-MM-DDTHH24:MI:SS.MSZ')::timestamp WITH time zone
+SET update_time = to_timestamp(json_data->>'updateTime', 'YYYY-MM-DD"T"HH24:MI:SS.MS')::timestamp AT TIME ZONE '00:00'
 WHERE json_data->>'updateTime' IS NOT NULL;
\ No newline at end of file
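As a closing illustration of the PostgresIndexDAO change in PATCH 1/3, the upsert can be exercised by hand. The values below are made up for demonstration (the real DAO binds them from a WorkflowSummary), so treat this as a sketch of the statement's behaviour rather than the shipped query text:

    INSERT INTO workflow_index (workflow_id, correlation_id, workflow_type, start_time, update_time, status, json_data)
    VALUES ('wf-example-1', 'corr-1', 'example_workflow', now(), now(), 'COMPLETED', '{}'::JSONB)
    ON CONFLICT (workflow_id)
    DO UPDATE SET correlation_id = EXCLUDED.correlation_id, workflow_type = EXCLUDED.workflow_type,
        start_time = EXCLUDED.start_time, status = EXCLUDED.status, json_data = EXCLUDED.json_data,
        update_time = EXCLUDED.update_time
    WHERE EXCLUDED.update_time >= workflow_index.update_time;

Re-running it with an older update_time leaves the stored row untouched; only an equal or newer update_time takes the DO UPDATE branch, which is the ordering guarantee the new update_time assignment keeps intact.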