diff --git a/hadoop-hdds/common/src/main/resources/ozone-default.xml b/hadoop-hdds/common/src/main/resources/ozone-default.xml index c7462108d760..722f9e4b101f 100644 --- a/hadoop-hdds/common/src/main/resources/ozone-default.xml +++ b/hadoop-hdds/common/src/main/resources/ozone-default.xml @@ -2374,101 +2374,6 @@ If enabled, tracing information is sent to tracing server. - - ozone.recon.sql.db.driver - org.sqlite.JDBC - OZONE, RECON - - Database driver class name available on the - Ozone Recon classpath. - - - - ozone.recon.sql.db.jdbc.url - jdbc:sqlite:${ozone.recon.db.dir}/ozone_recon_sqlite.db - OZONE, RECON - - Ozone Recon SQL database jdbc url. - - - - ozone.recon.sql.db.username - - OZONE, RECON - - Ozone Recon SQL database username. - - - - ozone.recon.sql.db.password - - OZONE, RECON - - Ozone Recon database password. - - - - ozone.recon.sql.db.auto.commit - false - OZONE, RECON - - Sets the Ozone Recon database connection property of auto-commit to - true/false. - - - - ozone.recon.sql.db.conn.timeout - 30000 - OZONE, RECON - - Sets time in milliseconds before call to getConnection is timed out. - - - - ozone.recon.sql.db.conn.max.active - 1 - OZONE, RECON - - The max active connections to the SQL database. The default SQLite - database only allows single active connection, set this to a - reasonable value like 10, for external production database. - - - - ozone.recon.sql.db.conn.max.age - 1800 - OZONE, RECON - - Sets maximum time a connection can be active in seconds. - - - - ozone.recon.sql.db.conn.idle.max.age - 3600 - OZONE, RECON - - Sets maximum time to live for idle connection in seconds. - - - - ozone.recon.sql.db.conn.idle.test.period - 60 - OZONE, RECON - - This sets the time (in seconds), for a connection to remain idle before - sending a test query to the DB. This is useful to prevent a DB from - timing out connections on its end. - - - - ozone.recon.sql.db.conn.idle.test - SELECT 1 - OZONE, RECON - - The query to send to the DB to maintain keep-alives and test for dead - connections. 
- - ozone.recon.task.thread.count 1 diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/MiniOzoneClusterImpl.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/MiniOzoneClusterImpl.java index e444355ced21..5baa65b43c37 100644 --- a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/MiniOzoneClusterImpl.java +++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/MiniOzoneClusterImpl.java @@ -29,7 +29,7 @@ import static org.apache.hadoop.ozone.recon.ReconServerConfigKeys.OZONE_RECON_HTTP_ADDRESS_KEY; import static org.apache.hadoop.ozone.recon.ReconServerConfigKeys.OZONE_RECON_OM_SNAPSHOT_DB_DIR; import static org.apache.hadoop.ozone.recon.ReconServerConfigKeys.OZONE_RECON_SCM_DB_DIR; -import static org.apache.hadoop.ozone.recon.ReconServerConfigKeys.OZONE_RECON_SQL_DB_JDBC_URL; +import static org.hadoop.ozone.recon.codegen.ReconSqlDbConfig.ConfigKeys.OZONE_RECON_SQL_DB_JDBC_URL; import java.io.File; import java.io.IOException; @@ -804,8 +804,8 @@ protected void configureRecon() throws IOException { .getAbsolutePath()); conf.set(OZONE_RECON_SCM_DB_DIR, tempNewFolder.getAbsolutePath()); - conf.set(OZONE_RECON_SQL_DB_JDBC_URL, "jdbc:sqlite:" + - tempNewFolder.getAbsolutePath() + "/ozone_recon_sqlite.db"); + conf.set(OZONE_RECON_SQL_DB_JDBC_URL, "jdbc:derby:" + + tempNewFolder.getAbsolutePath() + "/ozone_recon_derby.db"); conf.set(OZONE_RECON_HTTP_ADDRESS_KEY, "0.0.0.0:0"); conf.set(OZONE_RECON_DATANODE_ADDRESS_KEY, "0.0.0.0:0"); diff --git a/hadoop-ozone/recon-codegen/pom.xml b/hadoop-ozone/recon-codegen/pom.xml index 75950896e329..9b8780bbff37 100644 --- a/hadoop-ozone/recon-codegen/pom.xml +++ b/hadoop-ozone/recon-codegen/pom.xml @@ -29,8 +29,9 @@ hadoop-ozone-common - org.xerial - sqlite-jdbc + org.apache.derby + derby + 10.14.2.0 com.google.inject.extensions diff --git a/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/codegen/JooqCodeGenerator.java b/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/codegen/JooqCodeGenerator.java index ad9b819c1941..246f03910c96 100644 --- a/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/codegen/JooqCodeGenerator.java +++ b/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/codegen/JooqCodeGenerator.java @@ -17,13 +17,19 @@ */ package org.hadoop.ozone.recon.codegen; +import static org.hadoop.ozone.recon.codegen.SqlDbUtils.DERBY_DRIVER_CLASS; +import static org.hadoop.ozone.recon.codegen.SqlDbUtils.createNewDerbyDatabase; + import java.io.File; +import java.nio.file.Paths; import java.sql.SQLException; import java.util.Set; import javax.sql.DataSource; import org.apache.commons.io.FileUtils; +import org.apache.derby.jdbc.EmbeddedDataSource; +import org.apache.hadoop.util.Time; import org.hadoop.ozone.recon.schema.ReconSchemaDefinition; import org.jooq.codegen.GenerationTool; import org.jooq.meta.jaxb.Configuration; @@ -31,11 +37,11 @@ import org.jooq.meta.jaxb.Generate; import org.jooq.meta.jaxb.Generator; import org.jooq.meta.jaxb.Jdbc; +import org.jooq.meta.jaxb.Logging; import org.jooq.meta.jaxb.Strategy; import org.jooq.meta.jaxb.Target; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.sqlite.SQLiteDataSource; import com.google.inject.AbstractModule; import com.google.inject.Guice; @@ -55,10 +61,11 @@ public class JooqCodeGenerator { private static final Logger LOG = LoggerFactory.getLogger(JooqCodeGenerator.class); - private static final String SQLITE_DB = - 
System.getProperty("java.io.tmpdir") + "/recon-generated-schema"; - private static final String JDBC_URL = "jdbc:sqlite:" + SQLITE_DB; - + private static final String DB = Paths.get( + System.getProperty("java.io.tmpdir"), + "recon-generated-schema-" + Time.monotonicNow()).toString(); + public static final String RECON_SCHEMA_NAME = "RECON"; + private static final String JDBC_URL = "jdbc:derby:" + DB; private final Set allDefinitions; @Inject @@ -82,26 +89,25 @@ private void generateSourceCode(String outputDir) throws Exception { Configuration configuration = new Configuration() .withJdbc(new Jdbc() - .withDriver("org.sqlite.JDBC") - .withUrl(JDBC_URL) - .withUser("sa") - .withPassword("sa")) + .withDriver(DERBY_DRIVER_CLASS) + .withUrl(JDBC_URL)) .withGenerator(new Generator() .withDatabase(new Database() - .withName("org.jooq.meta.sqlite.SQLiteDatabase") + .withName("org.jooq.meta.derby.DerbyDatabase") .withOutputSchemaToDefault(true) .withIncludeTables(true) - .withIncludePrimaryKeys(true)) + .withIncludePrimaryKeys(true) + .withInputSchema(RECON_SCHEMA_NAME)) .withGenerate(new Generate() .withDaos(true) - .withEmptyCatalogs(true) - .withEmptySchemas(true)) + .withEmptyCatalogs(true)) .withStrategy(new Strategy().withName( "org.hadoop.ozone.recon.codegen.TableNamingStrategy")) .withTarget(new Target() .withPackageName("org.hadoop.ozone.recon.schema") .withClean(true) - .withDirectory(outputDir))); + .withDirectory(outputDir))) + .withLogging(Logging.WARN); GenerationTool.generate(configuration); } @@ -109,20 +115,25 @@ private void generateSourceCode(String outputDir) throws Exception { * Provider for embedded datasource. */ static class LocalDataSourceProvider implements Provider { - private static SQLiteDataSource db; - + private static EmbeddedDataSource dataSource; static { - db = new SQLiteDataSource(); - db.setUrl(JDBC_URL); + try { + createNewDerbyDatabase(JDBC_URL, RECON_SCHEMA_NAME); + } catch (Exception e) { + LOG.error("Error creating Recon Derby DB.", e); + } + dataSource = new EmbeddedDataSource(); + dataSource.setDatabaseName(DB); + dataSource.setUser(RECON_SCHEMA_NAME); } @Override public DataSource get() { - return db; + return dataSource; } static void cleanup() { - FileUtils.deleteQuietly(new File(SQLITE_DB)); + FileUtils.deleteQuietly(new File(DB)); } } diff --git a/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/codegen/ReconSqlDbConfig.java b/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/codegen/ReconSqlDbConfig.java new file mode 100644 index 000000000000..704d26bdecce --- /dev/null +++ b/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/codegen/ReconSqlDbConfig.java @@ -0,0 +1,239 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + *
+ * http://www.apache.org/licenses/LICENSE-2.0 + *
+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.hadoop.ozone.recon.codegen; + +import static java.util.concurrent.TimeUnit.SECONDS; + +import org.apache.hadoop.hdds.conf.Config; +import org.apache.hadoop.hdds.conf.ConfigGroup; +import org.apache.hadoop.hdds.conf.ConfigTag; +import org.apache.hadoop.hdds.conf.ConfigType; + +/** + * The configuration class for the Recon SQL DB. + */ +@ConfigGroup(prefix = "ozone.recon.sql.db") +public class ReconSqlDbConfig { + + @Config(key = "driver", + type = ConfigType.STRING, + defaultValue = "org.apache.derby.jdbc.EmbeddedDriver", + tags = {ConfigTag.STORAGE, ConfigTag.RECON, ConfigTag.OZONE}, + description = "Recon SQL DB driver class. Defaults to Derby." + ) + private String driverClass; + + public String getDriverClass() { + return driverClass; + } + + public void setDriverClass(String driverClass) { + this.driverClass = driverClass; + } + + @Config(key = "jdbc.url", + type = ConfigType.STRING, + defaultValue = "jdbc:derby:${ozone.recon.db.dir}/ozone_recon_derby.db", + tags = {ConfigTag.STORAGE, ConfigTag.RECON, ConfigTag.OZONE}, + description = "Ozone Recon SQL database jdbc url." + ) + private String jdbcUrl; + + public String getJdbcUrl() { + return jdbcUrl; + } + + public void setJdbcUrl(String jdbcUrl) { + this.jdbcUrl = jdbcUrl; + } + + @Config(key = "username", + type = ConfigType.STRING, + defaultValue = "", + tags = {ConfigTag.STORAGE, ConfigTag.RECON, ConfigTag.OZONE}, + description = "Ozone Recon SQL database username." + ) + private String username; + + public String getUsername() { + return username; + } + + public void setUsername(String username) { + this.username = username; + } + + @Config(key = "password", + type = ConfigType.STRING, + defaultValue = "", + tags = {ConfigTag.STORAGE, ConfigTag.RECON, ConfigTag.OZONE}, + description = "Ozone Recon SQL database password." + ) + private String password; + + public String getPassword() { + return password; + } + + public void setPassword(String password) { + this.password = password; + } + + @Config(key = "auto.commit", + type = ConfigType.BOOLEAN, + defaultValue = "false", + tags = {ConfigTag.STORAGE, ConfigTag.RECON, ConfigTag.OZONE}, + description = "Sets the Ozone Recon database connection property of " + + "auto-commit to true/false." + ) + private boolean autoCommit; + + public boolean isAutoCommit() { + return autoCommit; + } + + public void setAutoCommit(boolean autoCommit) { + this.autoCommit = autoCommit; + } + + @Config(key = "conn.timeout", + type = ConfigType.TIME, + defaultValue = "30000ms", + tags = {ConfigTag.STORAGE, ConfigTag.RECON, ConfigTag.OZONE}, + description = "Sets time in milliseconds before call to getConnection " + + "is timed out." + ) + private long connectionTimeout; + + public long getConnectionTimeout() { + return connectionTimeout; + } + + public void setConnectionTimeout(long connectionTimeout) { + this.connectionTimeout = connectionTimeout; + } + + @Config(key = "conn.max.active", + type = ConfigType.INT, + defaultValue = "5", + tags = {ConfigTag.STORAGE, ConfigTag.RECON, ConfigTag.OZONE}, + description = "The max active connections to the SQL database." 
+ ) + private int maxActiveConnections; + + public int getMaxActiveConnections() { + return maxActiveConnections; + } + + public void setMaxActiveConnections(int maxActiveConnections) { + this.maxActiveConnections = maxActiveConnections; + } + + @Config(key = "conn.max.age", + type = ConfigType.TIME, timeUnit = SECONDS, + defaultValue = "1800s", + tags = {ConfigTag.STORAGE, ConfigTag.RECON, ConfigTag.OZONE}, + description = "Sets maximum time a connection can be active in seconds." + ) + private long connectionMaxAge; + + public long getConnectionMaxAge() { + return connectionMaxAge; + } + + public void setConnectionMaxAge(long connectionMaxAge) { + this.connectionMaxAge = connectionMaxAge; + } + + @Config(key = "conn.idle.max.age", + type = ConfigType.TIME, timeUnit = SECONDS, + defaultValue = "3600s", + tags = {ConfigTag.STORAGE, ConfigTag.RECON, ConfigTag.OZONE}, + description = "Sets maximum time to live for idle connection in seconds." + ) + private long connectionIdleMaxAge; + + public long getConnectionIdleMaxAge() { + return connectionIdleMaxAge; + } + + public void setConnectionIdleMaxAge(long connectionIdleMaxAge) { + this.connectionIdleMaxAge = connectionIdleMaxAge; + } + + @Config(key = "conn.idle.test.period", + type = ConfigType.TIME, timeUnit = SECONDS, + defaultValue = "60s", + tags = {ConfigTag.STORAGE, ConfigTag.RECON, ConfigTag.OZONE}, + description = "The time (in seconds) for a connection to remain idle before " + + "sending a test query to the DB. This is useful to prevent a DB " + + "from timing out connections on its end." + ) + private long connectionIdleTestPeriod; + + public long getConnectionIdleTestPeriod() { + return connectionIdleTestPeriod; + } + + public void setConnectionIdleTestPeriod(long connectionIdleTestPeriod) { + this.connectionIdleTestPeriod = connectionIdleTestPeriod; + } + + @Config(key = "conn.idle.test", + type = ConfigType.STRING, + defaultValue = "SELECT 1", + tags = {ConfigTag.STORAGE, ConfigTag.RECON, ConfigTag.OZONE}, + description = "The query to send to the DB to maintain keep-alives and " + + "test for dead connections." + ) + private String idleTestQuery; + + public String getIdleTestQuery() { + return idleTestQuery; + } + + public void setIdleTestQuery(String idleTestQuery) { + this.idleTestQuery = idleTestQuery; + } + + @Config(key = "jooq.dialect", + type = ConfigType.STRING, + defaultValue = "DERBY", + tags = {ConfigTag.STORAGE, ConfigTag.RECON, ConfigTag.OZONE}, + description = "Recon internally uses Jooq to talk to its SQL DB. By " + + "default, we support Derby and Sqlite out of the box. Please refer " + + "to https://www.jooq.org/javadoc/latest/org" + + ".jooq/org/jooq/SQLDialect.html to specify a different dialect." + ) + private String sqlDbDialect; + + public String getSqlDbDialect() { + return sqlDbDialect; + } + + public void setSqlDbDialect(String sqlDbDialect) { + this.sqlDbDialect = sqlDbDialect; + } + + /** + * Class to hold config keys related to Recon SQL DB. + */ + public static class ConfigKeys { + public static final String OZONE_RECON_SQL_DB_JDBC_URL = + "ozone.recon.sql.db.jdbc.url"; + } +} diff --git a/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/codegen/SqlDbUtils.java b/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/codegen/SqlDbUtils.java new file mode 100644 index 000000000000..7e68541cf840 --- /dev/null +++ b/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/codegen/SqlDbUtils.java @@ -0,0 +1,97 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + *
+ * http://www.apache.org/licenses/LICENSE-2.0 + *
+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.hadoop.ozone.recon.codegen; + +import static org.jooq.impl.DSL.count; + +import java.io.IOException; +import java.io.OutputStream; +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.SQLException; +import java.util.function.BiPredicate; + +import org.jooq.exception.DataAccessException; +import org.jooq.impl.DSL; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Constants and Helper functions for Recon SQL related stuff. + */ +public final class SqlDbUtils { + + public final static String DERBY_DRIVER_CLASS = + "org.apache.derby.jdbc.EmbeddedDriver"; + public final static String SQLITE_DRIVER_CLASS = "org.sqlite.JDBC"; + public final static String DERBY_DISABLE_LOG_METHOD = + SqlDbUtils.class.getName() + ".disableDerbyLogFile"; + + private static final Logger LOG = + LoggerFactory.getLogger(SqlDbUtils.class); + + private SqlDbUtils() { + } + + /** + * Create new Derby Database with URL and schema name. + * @param jdbcUrl JDBC url. + * @param schemaName Schema name + * @throws ClassNotFoundException on not finding driver class. + * @throws SQLException on SQL exception. + */ + public static void createNewDerbyDatabase(String jdbcUrl, String schemaName) + throws ClassNotFoundException, SQLException { + System.setProperty("derby.stream.error.method", + DERBY_DISABLE_LOG_METHOD); + Class.forName(DERBY_DRIVER_CLASS); + try(Connection connection = DriverManager.getConnection(jdbcUrl + + ";user=" + schemaName + + ";create=true")) { + LOG.info("Created derby database at {}.", jdbcUrl); + } + } + + /** + * Used to suppress embedded derby database logging. + * @return No-Op output stream. + */ + public static OutputStream disableDerbyLogFile(){ + return new OutputStream() { + public void write(int b) throws IOException { + // Ignore all log messages + } + }; + } + + /** + * Helper function to check if table exists through JOOQ. 
+ */ + public static final BiPredicate TABLE_EXISTS_CHECK = + (conn, tableName) -> { + try { + DSL.using(conn).select(count()).from(tableName).execute(); + } catch (DataAccessException ex) { + LOG.debug(ex.getMessage()); + return false; + } + LOG.info("{} table already exists, skipping creation.", tableName); + return true; + }; +} diff --git a/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/schema/ContainerSchemaDefinition.java b/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/schema/ContainerSchemaDefinition.java index 243cb2443fee..ed60094b58ad 100644 --- a/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/schema/ContainerSchemaDefinition.java +++ b/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/schema/ContainerSchemaDefinition.java @@ -18,6 +18,8 @@ package org.hadoop.ozone.recon.schema; +import static org.hadoop.ozone.recon.codegen.SqlDbUtils.TABLE_EXISTS_CHECK; + import com.google.inject.Inject; import com.google.inject.Singleton; import org.jooq.DSLContext; @@ -35,9 +37,9 @@ public class ContainerSchemaDefinition implements ReconSchemaDefinition { public static final String CONTAINER_HISTORY_TABLE_NAME = - "container_history"; + "CONTAINER_HISTORY"; public static final String MISSING_CONTAINERS_TABLE_NAME = - "missing_containers"; + "MISSING_CONTAINERS"; private static final String CONTAINER_ID = "container_id"; private final DataSource dataSource; private DSLContext dslContext; @@ -51,8 +53,12 @@ public class ContainerSchemaDefinition implements ReconSchemaDefinition { public void initializeSchema() throws SQLException { Connection conn = dataSource.getConnection(); dslContext = DSL.using(conn); - createContainerHistoryTable(); - createMissingContainersTable(); + if (!TABLE_EXISTS_CHECK.test(conn, CONTAINER_HISTORY_TABLE_NAME)) { + createContainerHistoryTable(); + } + if (!TABLE_EXISTS_CHECK.test(conn, MISSING_CONTAINERS_TABLE_NAME)) { + createMissingContainersTable(); + } } /** diff --git a/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/schema/ReconTaskSchemaDefinition.java b/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/schema/ReconTaskSchemaDefinition.java index eec3cd5d134b..45fc1ba0d73b 100644 --- a/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/schema/ReconTaskSchemaDefinition.java +++ b/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/schema/ReconTaskSchemaDefinition.java @@ -18,6 +18,8 @@ package org.hadoop.ozone.recon.schema; +import static org.hadoop.ozone.recon.codegen.SqlDbUtils.TABLE_EXISTS_CHECK; + import java.sql.Connection; import java.sql.SQLException; @@ -37,7 +39,7 @@ public class ReconTaskSchemaDefinition implements ReconSchemaDefinition { public static final String RECON_TASK_STATUS_TABLE_NAME = - "recon_task_status"; + "RECON_TASK_STATUS"; private final DataSource dataSource; @Inject @@ -48,14 +50,16 @@ public class ReconTaskSchemaDefinition implements ReconSchemaDefinition { @Override public void initializeSchema() throws SQLException { Connection conn = dataSource.getConnection(); - createReconTaskStatus(conn); + if (!TABLE_EXISTS_CHECK.test(conn, RECON_TASK_STATUS_TABLE_NAME)) { + createReconTaskStatusTable(conn); + } } /** * Create the Recon Task Status table. 
* @param conn connection */ - private void createReconTaskStatus(Connection conn) { + private void createReconTaskStatusTable(Connection conn) { DSL.using(conn).createTableIfNotExists(RECON_TASK_STATUS_TABLE_NAME) .column("task_name", SQLDataType.VARCHAR(1024)) .column("last_updated_timestamp", SQLDataType.BIGINT) diff --git a/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/schema/StatsSchemaDefinition.java b/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/schema/StatsSchemaDefinition.java index 406585dc07d3..adfaca626d33 100644 --- a/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/schema/StatsSchemaDefinition.java +++ b/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/schema/StatsSchemaDefinition.java @@ -18,6 +18,8 @@ package org.hadoop.ozone.recon.schema; +import static org.hadoop.ozone.recon.codegen.SqlDbUtils.TABLE_EXISTS_CHECK; + import com.google.inject.Inject; import com.google.inject.Singleton; import org.jooq.impl.DSL; @@ -33,7 +35,7 @@ @Singleton public class StatsSchemaDefinition implements ReconSchemaDefinition { - public static final String GLOBAL_STATS_TABLE_NAME = "global_stats"; + public static final String GLOBAL_STATS_TABLE_NAME = "GLOBAL_STATS"; private final DataSource dataSource; @Inject @@ -44,7 +46,9 @@ public class StatsSchemaDefinition implements ReconSchemaDefinition { @Override public void initializeSchema() throws SQLException { Connection conn = dataSource.getConnection(); - createGlobalStatsTable(conn); + if (!TABLE_EXISTS_CHECK.test(conn, GLOBAL_STATS_TABLE_NAME)) { + createGlobalStatsTable(conn); + } } /** diff --git a/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/schema/UtilizationSchemaDefinition.java b/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/schema/UtilizationSchemaDefinition.java index 95df8f736e86..941a3c635f01 100644 --- a/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/schema/UtilizationSchemaDefinition.java +++ b/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/schema/UtilizationSchemaDefinition.java @@ -17,14 +17,19 @@ */ package org.hadoop.ozone.recon.schema; +import static org.hadoop.ozone.recon.codegen.SqlDbUtils.TABLE_EXISTS_CHECK; + import java.sql.Connection; import java.sql.SQLException; import javax.sql.DataSource; import com.google.inject.Singleton; + import org.jooq.impl.DSL; import org.jooq.impl.SQLDataType; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.springframework.transaction.annotation.Transactional; import com.google.inject.Inject; @@ -35,13 +40,15 @@ @Singleton public class UtilizationSchemaDefinition implements ReconSchemaDefinition { + private static final Logger LOG = + LoggerFactory.getLogger(UtilizationSchemaDefinition.class); + private final DataSource dataSource; public static final String CLUSTER_GROWTH_DAILY_TABLE_NAME = - "cluster_growth_daily"; - + "CLUSTER_GROWTH_DAILY"; public static final String FILE_COUNT_BY_SIZE_TABLE_NAME = - "file_count_by_size"; + "FILE_COUNT_BY_SIZE"; @Inject UtilizationSchemaDefinition(DataSource dataSource) { @@ -52,8 +59,12 @@ public class UtilizationSchemaDefinition implements ReconSchemaDefinition { @Transactional public void initializeSchema() throws SQLException { Connection conn = dataSource.getConnection(); - createClusterGrowthTable(conn); - createFileSizeCount(conn); + if (!TABLE_EXISTS_CHECK.test(conn, FILE_COUNT_BY_SIZE_TABLE_NAME)) { + createFileSizeCountTable(conn); + } + if (!TABLE_EXISTS_CHECK.test(conn, 
CLUSTER_GROWTH_DAILY_TABLE_NAME)) { + createClusterGrowthTable(conn); + } } private void createClusterGrowthTable(Connection conn) { @@ -71,7 +82,7 @@ private void createClusterGrowthTable(Connection conn) { .execute(); } - private void createFileSizeCount(Connection conn) { + private void createFileSizeCountTable(Connection conn) { DSL.using(conn).createTableIfNotExists(FILE_COUNT_BY_SIZE_TABLE_NAME) .column("file_size", SQLDataType.BIGINT) .column("count", SQLDataType.BIGINT) diff --git a/hadoop-ozone/recon/pom.xml b/hadoop-ozone/recon/pom.xml index 159ea1ae8d23..ddb3c7d886e0 100644 --- a/hadoop-ozone/recon/pom.xml +++ b/hadoop-ozone/recon/pom.xml @@ -303,6 +303,11 @@ com.jolbox bonecp + + org.apache.derby + derby + 10.14.2.0 + org.xerial sqlite-jdbc diff --git a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconControllerModule.java b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconControllerModule.java index 36e4cbd0331f..590997ec543d 100644 --- a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconControllerModule.java +++ b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconControllerModule.java @@ -19,17 +19,6 @@ import static org.apache.hadoop.hdds.scm.cli.ContainerOperationClient.newContainerRpcClient; import static org.apache.hadoop.ozone.om.OMConfigKeys.OZONE_OM_INTERNAL_SERVICE_ID; -import static org.apache.hadoop.ozone.recon.ReconServerConfigKeys.OZONE_RECON_SQL_AUTO_COMMIT; -import static org.apache.hadoop.ozone.recon.ReconServerConfigKeys.OZONE_RECON_SQL_CONNECTION_TIMEOUT; -import static org.apache.hadoop.ozone.recon.ReconServerConfigKeys.OZONE_RECON_SQL_DB_DRIVER; -import static org.apache.hadoop.ozone.recon.ReconServerConfigKeys.OZONE_RECON_SQL_DB_JDBC_URL; -import static org.apache.hadoop.ozone.recon.ReconServerConfigKeys.OZONE_RECON_SQL_DB_PASSWORD; -import static org.apache.hadoop.ozone.recon.ReconServerConfigKeys.OZONE_RECON_SQL_DB_USER; -import static org.apache.hadoop.ozone.recon.ReconServerConfigKeys.OZONE_RECON_SQL_IDLE_CONNECTION_TEST_PERIOD; -import static org.apache.hadoop.ozone.recon.ReconServerConfigKeys.OZONE_RECON_SQL_MAX_ACTIVE_CONNECTIONS; -import static org.apache.hadoop.ozone.recon.ReconServerConfigKeys.OZONE_RECON_SQL_MAX_CONNECTION_AGE; -import static org.apache.hadoop.ozone.recon.ReconServerConfigKeys.OZONE_RECON_SQL_MAX_IDLE_CONNECTION_AGE; -import static org.apache.hadoop.ozone.recon.ReconServerConfigKeys.OZONE_RECON_SQL_MAX_IDLE_CONNECTION_TEST_STMT; import java.io.IOException; import java.lang.reflect.Constructor; @@ -62,6 +51,7 @@ import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.hdds.utils.db.DBStore; import org.apache.ratis.protocol.ClientId; +import org.hadoop.ozone.recon.codegen.ReconSqlDbConfig; import org.hadoop.ozone.recon.schema.tables.daos.ClusterGrowthDailyDao; import org.hadoop.ozone.recon.schema.tables.daos.ContainerHistoryDao; import org.hadoop.ozone.recon.schema.tables.daos.FileCountBySizeDao; @@ -187,73 +177,68 @@ StorageContainerLocationProtocol getSCMProtocol( DataSourceConfiguration getDataSourceConfiguration( final OzoneConfiguration ozoneConfiguration) { + ReconSqlDbConfig sqlDbConfig = + ozoneConfiguration.getObject(ReconSqlDbConfig.class); + return new DataSourceConfiguration() { @Override public String getDriverClass() { - return ozoneConfiguration.get(OZONE_RECON_SQL_DB_DRIVER, - "org.sqlite.JDBC"); + return sqlDbConfig.getDriverClass(); } @Override public String getJdbcUrl() { - return 
ozoneConfiguration.get(OZONE_RECON_SQL_DB_JDBC_URL); + return sqlDbConfig.getJdbcUrl(); } @Override public String getUserName() { - return ozoneConfiguration.get(OZONE_RECON_SQL_DB_USER); + return sqlDbConfig.getUsername(); } @Override public String getPassword() { - return ozoneConfiguration.get(OZONE_RECON_SQL_DB_PASSWORD); + return sqlDbConfig.getPassword(); } @Override public boolean setAutoCommit() { - return ozoneConfiguration.getBoolean( - OZONE_RECON_SQL_AUTO_COMMIT, false); + return sqlDbConfig.isAutoCommit(); } @Override public long getConnectionTimeout() { - return ozoneConfiguration.getLong( - OZONE_RECON_SQL_CONNECTION_TIMEOUT, 30000); + return sqlDbConfig.getConnectionTimeout(); } @Override public String getSqlDialect() { - return JooqPersistenceModule.DEFAULT_DIALECT.toString(); + return sqlDbConfig.getSqlDbDialect(); } @Override public Integer getMaxActiveConnections() { - return ozoneConfiguration.getInt( - OZONE_RECON_SQL_MAX_ACTIVE_CONNECTIONS, 10); + return sqlDbConfig.getMaxActiveConnections(); } @Override - public Integer getMaxConnectionAge() { - return ozoneConfiguration.getInt( - OZONE_RECON_SQL_MAX_CONNECTION_AGE, 1800); + public long getMaxConnectionAge() { + return sqlDbConfig.getConnectionMaxAge(); } @Override - public Integer getMaxIdleConnectionAge() { - return ozoneConfiguration.getInt( - OZONE_RECON_SQL_MAX_IDLE_CONNECTION_AGE, 3600); + public long getMaxIdleConnectionAge() { + return sqlDbConfig.getConnectionIdleMaxAge(); } @Override public String getConnectionTestStatement() { - return ozoneConfiguration.get( - OZONE_RECON_SQL_MAX_IDLE_CONNECTION_TEST_STMT, "SELECT 1"); + return sqlDbConfig.getIdleTestQuery(); } @Override - public Integer getIdleConnectionTestPeriod() { - return ozoneConfiguration.getInt( - OZONE_RECON_SQL_IDLE_CONNECTION_TEST_PERIOD, 60); + public long getIdleConnectionTestPeriod() { + return sqlDbConfig.getConnectionIdleTestPeriod(); } }; diff --git a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconServerConfigKeys.java b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconServerConfigKeys.java index 9d037c31ca73..d2eb8e1654f8 100644 --- a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconServerConfigKeys.java +++ b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconServerConfigKeys.java @@ -96,30 +96,6 @@ public final class ReconServerConfigKeys { public static final String RECON_OM_SNAPSHOT_TASK_FLUSH_PARAM = "recon.om.snapshot.task.flush.param"; - // Persistence properties - public static final String OZONE_RECON_SQL_DB_DRIVER = - "ozone.recon.sql.db.driver"; - public static final String OZONE_RECON_SQL_DB_JDBC_URL = - "ozone.recon.sql.db.jdbc.url"; - public static final String OZONE_RECON_SQL_DB_USER = - "ozone.recon.sql.db.username"; - public static final String OZONE_RECON_SQL_DB_PASSWORD = - "ozone.recon.sql.db.password"; - public static final String OZONE_RECON_SQL_AUTO_COMMIT = - "ozone.recon.sql.db.auto.commit"; - public static final String OZONE_RECON_SQL_CONNECTION_TIMEOUT = - "ozone.recon.sql.db.conn.timeout"; - public static final String OZONE_RECON_SQL_MAX_ACTIVE_CONNECTIONS = - "ozone.recon.sql.db.conn.max.active"; - public static final String OZONE_RECON_SQL_MAX_CONNECTION_AGE = - "ozone.recon.sql.db.conn.max.age"; - public static final String OZONE_RECON_SQL_MAX_IDLE_CONNECTION_AGE = - "ozone.recon.sql.db.conn.idle.max.age"; - public static final String OZONE_RECON_SQL_IDLE_CONNECTION_TEST_PERIOD = - "ozone.recon.sql.db.conn.idle.test.period"; - 
public static final String OZONE_RECON_SQL_MAX_IDLE_CONNECTION_TEST_STMT = - "ozone.recon.sql.db.conn.idle.test"; - public static final String OZONE_RECON_TASK_THREAD_COUNT_KEY = "ozone.recon.task.thread.count"; public static final int OZONE_RECON_TASK_THREAD_COUNT_DEFAULT = 5; diff --git a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/persistence/DataSourceConfiguration.java b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/persistence/DataSourceConfiguration.java index 54ef88860a69..7e97c4f76e51 100644 --- a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/persistence/DataSourceConfiguration.java +++ b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/persistence/DataSourceConfiguration.java @@ -66,12 +66,12 @@ public interface DataSourceConfiguration { /** * Sets the maximum connection age (in seconds). */ - Integer getMaxConnectionAge(); + long getMaxConnectionAge(); /** * Sets the maximum idle connection age (in seconds). */ - Integer getMaxIdleConnectionAge(); + long getMaxIdleConnectionAge(); /** * Statement specific to database, usually SELECT 1. @@ -81,5 +81,5 @@ public interface DataSourceConfiguration { /** * How often to test idle connections for being active (in seconds). */ - Integer getIdleConnectionTestPeriod(); + long getIdleConnectionTestPeriod(); } diff --git a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/persistence/DefaultDataSourceProvider.java b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/persistence/DefaultDataSourceProvider.java index b0b88470036c..42cde7d149d5 100644 --- a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/persistence/DefaultDataSourceProvider.java +++ b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/persistence/DefaultDataSourceProvider.java @@ -20,7 +20,6 @@ import javax.sql.DataSource; import org.apache.commons.lang3.StringUtils; -import org.sqlite.SQLiteDataSource; import com.google.inject.Inject; import com.google.inject.Provider; @@ -43,14 +42,14 @@ public class DefaultDataSourceProvider implements Provider { */ @Override public DataSource get() { - if (StringUtils.contains(configuration.getJdbcUrl(), "sqlite")) { - SQLiteDataSource ds = new SQLiteDataSource(); - ds.setUrl(configuration.getJdbcUrl()); - return ds; + String jdbcUrl = configuration.getJdbcUrl(); + if (StringUtils.contains(jdbcUrl, "derby")) { + return new DerbyDataSourceProvider(configuration).get(); + } else if (StringUtils.contains(jdbcUrl, "sqlite")) { + return new SqliteDataSourceProvider(configuration).get(); } BoneCPDataSource cpDataSource = new BoneCPDataSource(); - cpDataSource.setDriverClass(configuration.getDriverClass()); cpDataSource.setJdbcUrl(configuration.getJdbcUrl()); cpDataSource.setUsername(configuration.getUserName()); diff --git a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/persistence/DerbyDataSourceProvider.java b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/persistence/DerbyDataSourceProvider.java new file mode 100644 index 000000000000..51678c011675 --- /dev/null +++ b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/persistence/DerbyDataSourceProvider.java @@ -0,0 +1,61 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + *
+ * http://www.apache.org/licenses/LICENSE-2.0 + *
+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.ozone.recon.persistence; + +import static org.hadoop.ozone.recon.codegen.JooqCodeGenerator.RECON_SCHEMA_NAME; +import static org.hadoop.ozone.recon.codegen.SqlDbUtils.createNewDerbyDatabase; + +import javax.sql.DataSource; + +import org.apache.derby.jdbc.EmbeddedDataSource; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.inject.Inject; +import com.google.inject.Provider; + +/** + * Provide a {@link javax.sql.DataSource} for the application. + */ +public class DerbyDataSourceProvider implements Provider { + + private static final Logger LOG = + LoggerFactory.getLogger(DerbyDataSourceProvider.class); + + private DataSourceConfiguration configuration; + + @Inject + DerbyDataSourceProvider(DataSourceConfiguration configuration) { + this.configuration = configuration; + } + + @Override + public DataSource get() { + String jdbcUrl = configuration.getJdbcUrl(); + LOG.info("JDBC Url for Recon : {} ", jdbcUrl); + try { + createNewDerbyDatabase(jdbcUrl, RECON_SCHEMA_NAME); + } catch (Exception e) { + LOG.error("Error creating Recon Derby DB.", e); + } + EmbeddedDataSource dataSource = new EmbeddedDataSource(); + dataSource.setDatabaseName(jdbcUrl.split(":")[2]); + dataSource.setUser(RECON_SCHEMA_NAME); + return dataSource; + } +} diff --git a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/persistence/JooqPersistenceModule.java b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/persistence/JooqPersistenceModule.java index f7ab4a5b6fe6..a28cdf25e96b 100644 --- a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/persistence/JooqPersistenceModule.java +++ b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/persistence/JooqPersistenceModule.java @@ -45,7 +45,7 @@ public class JooqPersistenceModule extends AbstractModule { private Provider configurationProvider; - public static final SQLDialect DEFAULT_DIALECT = SQLDialect.SQLITE; + public static final SQLDialect DEFAULT_DIALECT = SQLDialect.DERBY; public JooqPersistenceModule( Provider configurationProvider) { diff --git a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/persistence/SqliteDataSourceProvider.java b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/persistence/SqliteDataSourceProvider.java new file mode 100644 index 000000000000..897f8be8c7fa --- /dev/null +++ b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/persistence/SqliteDataSourceProvider.java @@ -0,0 +1,53 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + *
+ * http://www.apache.org/licenses/LICENSE-2.0 + *
+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.ozone.recon.persistence; + +import javax.sql.DataSource; + +import org.sqlite.SQLiteDataSource; + +import com.google.inject.Inject; +import com.google.inject.Provider; + +/** + * Provide a {@link javax.sql.DataSource} for the application. + */ +public class SqliteDataSourceProvider implements Provider { + + private DataSourceConfiguration configuration; + + @Inject + public SqliteDataSourceProvider(DataSourceConfiguration configuration) { + this.configuration = configuration; + } + + /** + * Create a pooled datasource for the application. + *
+ * Default sqlite database does not work with a connection pool, actually + * most embedded databases do not, hence returning native implementation for + * default db. + */ + @Override + public DataSource get() { + SQLiteDataSource ds = new SQLiteDataSource(); + ds.setUrl(configuration.getJdbcUrl()); + return ds; + } +} \ No newline at end of file diff --git a/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/persistence/AbstractReconSqlDBTest.java b/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/persistence/AbstractReconSqlDBTest.java index f8768dce0f74..664a7321d9a2 100644 --- a/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/persistence/AbstractReconSqlDBTest.java +++ b/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/persistence/AbstractReconSqlDBTest.java @@ -17,6 +17,8 @@ */ package org.apache.hadoop.ozone.recon.persistence; +import static org.hadoop.ozone.recon.codegen.SqlDbUtils.DERBY_DRIVER_CLASS; + import java.io.File; import java.io.IOException; import java.sql.Connection; @@ -56,10 +58,22 @@ public class AbstractReconSqlDBTest { private Injector injector; private DSLContext dslContext; + private Provider configurationProvider; public AbstractReconSqlDBTest() { try { temporaryFolder.create(); + configurationProvider = + new DerbyDataSourceConfigurationProvider(temporaryFolder.newFolder()); + } catch (IOException e) { + Assert.fail(); + } + } + + protected AbstractReconSqlDBTest(Provider provider) { + try { + temporaryFolder.create(); + configurationProvider = provider; } catch (IOException e) { Assert.fail(); } @@ -76,12 +90,9 @@ public void createReconSchemaForTest() throws IOException { /** * Get set of Guice modules needed to setup a Recon SQL DB. * @return List of modules. - * @throws IOException on Error. */ - public List getReconSqlDBModules() throws IOException { + public List getReconSqlDBModules() { List modules = new ArrayList<>(); - DataSourceConfigurationProvider configurationProvider = - new DataSourceConfigurationProvider(temporaryFolder.newFolder()); modules.add(new JooqPersistenceModule(configurationProvider)); modules.add(new AbstractModule() { @Override @@ -143,14 +154,14 @@ protected T getSchemaDefinition(Class type) { } /** - * Local Sqlite datasource provider. + * Local Derby datasource provider. 
*/ - public static class DataSourceConfigurationProvider implements + public static class DerbyDataSourceConfigurationProvider implements Provider { private final File tempDir; - public DataSourceConfigurationProvider(File tempDir) { + public DerbyDataSourceConfigurationProvider(File tempDir) { this.tempDir = tempDir; } @@ -159,13 +170,13 @@ public DataSourceConfiguration get() { return new DataSourceConfiguration() { @Override public String getDriverClass() { - return "org.sqlite.JDBC"; + return DERBY_DRIVER_CLASS; } @Override public String getJdbcUrl() { - return "jdbc:sqlite:" + tempDir.getAbsolutePath() + - File.separator + "sqlite_recon.db"; + return "jdbc:derby:" + tempDir.getAbsolutePath() + + File.separator + "derby_recon.db"; } @Override @@ -190,7 +201,7 @@ public long getConnectionTimeout() { @Override public String getSqlDialect() { - return SQLDialect.SQLITE.toString(); + return SQLDialect.DERBY.toString(); } @Override @@ -199,12 +210,12 @@ public Integer getMaxActiveConnections() { } @Override - public Integer getMaxConnectionAge() { + public long getMaxConnectionAge() { return 120; } @Override - public Integer getMaxIdleConnectionAge() { + public long getMaxIdleConnectionAge() { return 120; } @@ -214,7 +225,7 @@ public String getConnectionTestStatement() { } @Override - public Integer getIdleConnectionTestPeriod() { + public long getIdleConnectionTestPeriod() { return 30; } }; diff --git a/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/persistence/TestReconInternalSchemaDefinition.java b/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/persistence/TestReconInternalSchemaDefinition.java index a771edd3b58e..befd1edb0ebe 100644 --- a/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/persistence/TestReconInternalSchemaDefinition.java +++ b/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/persistence/TestReconInternalSchemaDefinition.java @@ -52,9 +52,9 @@ public void testSchemaCreated() throws Exception { expectedPairs.add(new ImmutablePair<>("task_name", Types.VARCHAR)); expectedPairs.add(new ImmutablePair<>("last_updated_timestamp", - Types.INTEGER)); + Types.BIGINT)); expectedPairs.add(new ImmutablePair<>("last_updated_seq_number", - Types.INTEGER)); + Types.BIGINT)); List> actualPairs = new ArrayList<>(); diff --git a/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/persistence/TestReconWithDifferentSqlDBs.java b/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/persistence/TestReconWithDifferentSqlDBs.java new file mode 100644 index 000000000000..12b9659cd5fd --- /dev/null +++ b/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/persistence/TestReconWithDifferentSqlDBs.java @@ -0,0 +1,159 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + *
+ * http://www.apache.org/licenses/LICENSE-2.0 + *
+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package org.apache.hadoop.ozone.recon.persistence; + +import static java.util.stream.Collectors.toList; +import static org.apache.hadoop.ozone.recon.ReconControllerModule.ReconDaoBindingModule.RECON_DAO_LIST; +import static org.hadoop.ozone.recon.codegen.SqlDbUtils.SQLITE_DRIVER_CLASS; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; + +import java.io.File; +import java.io.IOException; +import java.sql.SQLException; +import java.util.stream.Stream; + +import org.hadoop.ozone.recon.schema.tables.daos.ReconTaskStatusDao; +import org.hadoop.ozone.recon.schema.tables.pojos.ReconTaskStatus; +import org.jooq.SQLDialect; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; + +import com.google.inject.Provider; + +/** + * Test Recon schema with different DBs. + */ +@RunWith(Parameterized.class) +public class TestReconWithDifferentSqlDBs extends AbstractReconSqlDBTest { + + public TestReconWithDifferentSqlDBs( + Provider provider) { + super(provider); + } + + @Parameterized.Parameters(name = "{0}") + public static Iterable parameters() throws IOException { + TemporaryFolder temporaryFolder = new TemporaryFolder(); + temporaryFolder.create(); + return Stream.of( + new DerbyDataSourceConfigurationProvider(temporaryFolder.newFolder()), + new SqliteDataSourceConfigurationProvider(temporaryFolder.newFolder())) + .map(each -> new Object[] {each}) + .collect(toList()); + } + + /** + * Make sure schema was created correctly. + * @throws SQLException + */ + @Test + public void testSchemaSetup() throws SQLException { + assertNotNull(getInjector()); + assertNotNull(getConfiguration()); + assertNotNull(getDslContext()); + assertNotNull(getConnection()); + RECON_DAO_LIST.forEach(dao -> { + assertNotNull(getDao(dao)); + }); + ReconTaskStatusDao dao = getDao(ReconTaskStatusDao.class); + dao.insert(new ReconTaskStatus("TestTask", 1L, 2L)); + assertEquals(1, dao.findAll().size()); + } + + /** + * Local Sqlite datasource provider. 
+ */ + public static class SqliteDataSourceConfigurationProvider implements + Provider { + + private final File tempDir; + + public SqliteDataSourceConfigurationProvider(File tempDir) { + this.tempDir = tempDir; + } + + @Override + public DataSourceConfiguration get() { + return new DataSourceConfiguration() { + @Override + public String getDriverClass() { + return SQLITE_DRIVER_CLASS; + } + + @Override + public String getJdbcUrl() { + return "jdbc:sqlite:" + tempDir.getAbsolutePath() + + File.separator + "recon_sqlite.db"; + } + + @Override + public String getUserName() { + return null; + } + + @Override + public String getPassword() { + return null; + } + + @Override + public boolean setAutoCommit() { + return true; + } + + @Override + public long getConnectionTimeout() { + return 10000; + } + + @Override + public String getSqlDialect() { + return SQLDialect.SQLITE.toString(); + } + + @Override + public Integer getMaxActiveConnections() { + return 2; + } + + @Override + public long getMaxConnectionAge() { + return 120; + } + + @Override + public long getMaxIdleConnectionAge() { + return 120; + } + + @Override + public String getConnectionTestStatement() { + return "SELECT 1"; + } + + @Override + public long getIdleConnectionTestPeriod() { + return 30; + } + }; + } + } +} diff --git a/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/persistence/TestStatsSchemaDefinition.java b/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/persistence/TestStatsSchemaDefinition.java index bb82119df09f..af08383dabbf 100644 --- a/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/persistence/TestStatsSchemaDefinition.java +++ b/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/persistence/TestStatsSchemaDefinition.java @@ -50,9 +50,9 @@ public void testIfStatsSchemaCreated() throws Exception { List> expectedPairs = new ArrayList<>(); expectedPairs.add(new ImmutablePair<>("key", Types.VARCHAR)); - expectedPairs.add(new ImmutablePair<>("value", Types.INTEGER)); + expectedPairs.add(new ImmutablePair<>("value", Types.BIGINT)); expectedPairs.add(new ImmutablePair<>("last_updated_timestamp", - Types.VARCHAR)); + Types.TIMESTAMP)); List> actualPairs = new ArrayList<>(); diff --git a/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/persistence/TestUtilizationSchemaDefinition.java b/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/persistence/TestUtilizationSchemaDefinition.java index ea2d08cab0a4..9e781da03be1 100644 --- a/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/persistence/TestUtilizationSchemaDefinition.java +++ b/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/persistence/TestUtilizationSchemaDefinition.java @@ -58,12 +58,12 @@ public void testReconSchemaCreated() throws Exception { List> expectedPairs = new ArrayList<>(); - expectedPairs.add(new ImmutablePair<>("timestamp", Types.VARCHAR)); + expectedPairs.add(new ImmutablePair<>("timestamp", Types.TIMESTAMP)); expectedPairs.add(new ImmutablePair<>("datanode_id", Types.INTEGER)); expectedPairs.add(new ImmutablePair<>("datanode_host", Types.VARCHAR)); expectedPairs.add(new ImmutablePair<>("rack_id", Types.VARCHAR)); - expectedPairs.add(new ImmutablePair<>("available_size", Types.INTEGER)); - expectedPairs.add(new ImmutablePair<>("used_size", Types.INTEGER)); + expectedPairs.add(new ImmutablePair<>("available_size", Types.BIGINT)); + expectedPairs.add(new ImmutablePair<>("used_size", Types.BIGINT)); expectedPairs.add(new 
ImmutablePair<>("container_count", Types.INTEGER)); expectedPairs.add(new ImmutablePair<>("block_count", Types.INTEGER)); @@ -82,9 +82,9 @@ public void testReconSchemaCreated() throws Exception { List> expectedPairsFileCount = new ArrayList<>(); expectedPairsFileCount.add( - new ImmutablePair<>("file_size", Types.INTEGER)); + new ImmutablePair<>("file_size", Types.BIGINT)); expectedPairsFileCount.add( - new ImmutablePair<>("count", Types.INTEGER)); + new ImmutablePair<>("count", Types.BIGINT)); List> actualPairsFileCount = new ArrayList<>(); while(resultSetFileCount.next()) {
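Usage sketch (not taken from the patch): the new ReconSqlDbConfig group is materialized from an OzoneConfiguration via getObject, as ReconControllerModule does above. Only classes and keys that appear in this diff are used; the main() wrapper and the Derby path literal are illustrative assumptions.

import static org.hadoop.ozone.recon.codegen.ReconSqlDbConfig.ConfigKeys.OZONE_RECON_SQL_DB_JDBC_URL;

import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.hadoop.ozone.recon.codegen.ReconSqlDbConfig;

public final class ReconSqlDbConfigSketch {
  public static void main(String[] args) {
    OzoneConfiguration conf = new OzoneConfiguration();
    // Individual keys can still be overridden; the path here is only an example.
    conf.set(OZONE_RECON_SQL_DB_JDBC_URL, "jdbc:derby:/tmp/recon/ozone_recon_derby.db");
    // @ConfigGroup(prefix = "ozone.recon.sql.db") lets the whole group be read
    // as one typed object.
    ReconSqlDbConfig sqlDbConfig = conf.getObject(ReconSqlDbConfig.class);
    System.out.println(sqlDbConfig.getDriverClass()); // org.apache.derby.jdbc.EmbeddedDriver by default
    System.out.println(sqlDbConfig.getJdbcUrl());
  }
}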
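The schema definitions above now wrap table creation in SqlDbUtils.TABLE_EXISTS_CHECK, presumably because Derby has no native CREATE TABLE IF NOT EXISTS. A rough sketch of that pattern for a hypothetical table, assuming a DataSource already wired to the Recon SQL DB (the table and column names below are made up):

import static org.hadoop.ozone.recon.codegen.SqlDbUtils.TABLE_EXISTS_CHECK;

import java.sql.Connection;
import java.sql.SQLException;
import javax.sql.DataSource;

import org.jooq.impl.DSL;
import org.jooq.impl.SQLDataType;

public class ExampleSchemaDefinition {

  private final DataSource dataSource;

  public ExampleSchemaDefinition(DataSource dataSource) {
    this.dataSource = dataSource;
  }

  public void initializeSchema() throws SQLException {
    Connection conn = dataSource.getConnection();
    // Probe with a cheap SELECT first, mirroring ContainerSchemaDefinition
    // and StatsSchemaDefinition in this patch.
    if (!TABLE_EXISTS_CHECK.test(conn, "EXAMPLE_TABLE")) {
      DSL.using(conn).createTableIfNotExists("EXAMPLE_TABLE")
          .column("example_key", SQLDataType.VARCHAR(255))
          .column("example_count", SQLDataType.BIGINT)
          .execute();
    }
  }
}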
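Derby becomes the default backend, but the SQLite path is kept (SQLITE_DRIVER_CLASS, SqliteDataSourceProvider, and the jooq.dialect key). A configuration-only sketch of switching back, with an illustrative file path; in a real deployment these would normally be set in ozone-site.xml rather than in code:

import org.apache.hadoop.hdds.conf.OzoneConfiguration;

public final class ReconSqliteOverrideSketch {
  public static void main(String[] args) {
    OzoneConfiguration conf = new OzoneConfiguration();
    // DefaultDataSourceProvider selects the SQLite provider whenever the
    // JDBC url contains "sqlite".
    conf.set("ozone.recon.sql.db.driver", "org.sqlite.JDBC");
    conf.set("ozone.recon.sql.db.jdbc.url",
        "jdbc:sqlite:/var/lib/ozone/recon/ozone_recon_sqlite.db");
    // Full key = @ConfigGroup prefix "ozone.recon.sql.db" + @Config key
    // "jooq.dialect"; SQLITE is one of the org.jooq.SQLDialect values.
    conf.set("ozone.recon.sql.db.jooq.dialect", "SQLITE");
  }
}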