diff --git a/src/main/java/com/microsoft/sqlserver/jdbc/SQLServerDatabaseMetaData.java b/src/main/java/com/microsoft/sqlserver/jdbc/SQLServerDatabaseMetaData.java index 7a8ec4059e..5d96742176 100644 --- a/src/main/java/com/microsoft/sqlserver/jdbc/SQLServerDatabaseMetaData.java +++ b/src/main/java/com/microsoft/sqlserver/jdbc/SQLServerDatabaseMetaData.java @@ -64,8 +64,12 @@ public final class SQLServerDatabaseMetaData implements java.sql.DatabaseMetaDat // uniqueidentifier https://msdn.microsoft.com/en-us/library/ms187942.aspx static final int UNIQUEIDENTIFIER_SIZE = 36; + // Stored procedure names for getting column metadata + private static final String SP_COLUMNS_170 = "sp_columns_170"; // SQL Server 2025 and later + private static final String SP_COLUMNS_100 = "sp_columns_100"; + enum CallableHandles { - SP_COLUMNS("{ call sp_columns(?, ?, ?, ?, ?) }", "{ call sp_columns_100(?, ?, ?, ?, ?, ?) }"), + SP_COLUMNS("{ call sp_columns(?, ?, ?, ?, ?) }", "{ call sp_columns_170(?, ?, ?, ?, ?, ?) }"), SP_COLUMN_PRIVILEGES("{ call sp_column_privileges(?, ?, ?, ?)}", "{ call sp_column_privileges(?, ?, ?, ?)}"), SP_TABLES("{ call sp_tables(?, ?, ?, ?) }", "{ call sp_tables(?, ?, ?, ?) }"), SP_SPECIAL_COLUMNS("{ call sp_special_columns (?, ?, ?, ?, ?, ?, ?)}", "{ call sp_special_columns_100 (?, ?, ?, ?, ?, ?, ?)}"), @@ -680,223 +684,358 @@ private static String escapeIDName(String inID) { return outID.toString(); } + /** + * getColumns() method to retrieve a description of table columns available in a catalog. + * This method tries sp_columns_170 first and falls back to sp_columns_100 if needed.
+ * + * @param catalog + * a catalog name; "" retrieves those without a catalog; null means that the catalog name should not be used to narrow + * the search + * @param schema + * a schema name pattern; "" retrieves those without a schema; null means that the schema name should not be used to narrow + * the search + * @param table + * a table name pattern + * @param col + * a column name pattern + * @return ResultSet - each row is a column description + * @throws SQLException + */ @Override public java.sql.ResultSet getColumns(String catalog, String schema, String table, String col) throws SQLException { + if (loggerExternal.isLoggable(Level.FINER) && Util.isActivityTraceOn()) { loggerExternal.finer(toString() + ACTIVITY_ID + ActivityCorrelator.getCurrent().toString()); } + checkClosed(); + + try { + if (!connection.isAzureDW()) { + return getColumnsNonAzureDW(catalog, schema, table, col); + } else { + return getColumnsAzureDW(catalog, schema, table, col); + } + } catch (SQLException e) { + if (loggerExternal.isLoggable(Level.FINER)) { + loggerExternal.finer("getColumns() failed: " + e.getMessage()); + } + throw e; + } + } + + /** + * Helper method to get columns for regular SQL Server (non-Azure DW). + * Tries sp_columns_170 first, falls back to sp_columns_100 if needed. 
+ */ + private java.sql.ResultSet getColumnsNonAzureDW(String catalog, String schema, String table, String col) + throws SQLException { + String originalCatalog = switchCatalogs(catalog); - if (!this.connection.isAzureDW()) { - String spColumnsSql = "DECLARE @mssqljdbc_temp_sp_columns_result TABLE(TABLE_QUALIFIER SYSNAME, TABLE_OWNER SYSNAME," - + "TABLE_NAME SYSNAME, COLUMN_NAME SYSNAME, DATA_TYPE SMALLINT, TYPE_NAME SYSNAME, PRECISION INT," - + "LENGTH INT, SCALE SMALLINT, RADIX SMALLINT, NULLABLE SMALLINT, REMARKS VARCHAR(254), COLUMN_DEF NVARCHAR(4000)," - + "SQL_DATA_TYPE SMALLINT, SQL_DATETIME_SUB SMALLINT, CHAR_OCTET_LENGTH INT, ORDINAL_POSITION INT," - + "IS_NULLABLE VARCHAR(254), SS_IS_SPARSE SMALLINT, SS_IS_COLUMN_SET SMALLINT, SS_IS_COMPUTED SMALLINT," - + "SS_IS_IDENTITY SMALLINT, SS_UDT_CATALOG_NAME NVARCHAR(128), SS_UDT_SCHEMA_NAME NVARCHAR(128)," - + "SS_UDT_ASSEMBLY_TYPE_NAME NVARCHAR(max), SS_XML_SCHEMACOLLECTION_CATALOG_NAME NVARCHAR(128)," - + "SS_XML_SCHEMACOLLECTION_SCHEMA_NAME NVARCHAR(128), SS_XML_SCHEMACOLLECTION_NAME NVARCHAR(128)," - + "SS_DATA_TYPE TINYINT);" - - + "INSERT INTO @mssqljdbc_temp_sp_columns_result EXEC sp_columns_100 ?,?,?,?,?,?;" - - + "SELECT TABLE_QUALIFIER AS TABLE_CAT, TABLE_OWNER AS TABLE_SCHEM, TABLE_NAME, COLUMN_NAME, " - + "CAST(DATA_TYPE AS INT) AS DATA_TYPE,TYPE_NAME, PRECISION AS COLUMN_SIZE, LENGTH AS BUFFER_LENGTH, " - + "CAST(SCALE AS INT) AS DECIMAL_DIGITS, CAST(RADIX AS INT) AS NUM_PREC_RADIX,CAST(NULLABLE AS INT) AS NULLABLE, " - + "CAST(REMARKS AS VARCHAR) AS REMARKS, COLUMN_DEF, CAST(SQL_DATA_TYPE AS INT) AS SQL_DATA_TYPE, " - + "CAST(SQL_DATETIME_SUB AS INT) AS SQL_DATETIME_SUB, CHAR_OCTET_LENGTH, ORDINAL_POSITION, IS_NULLABLE," - + "CAST(NULL AS VARCHAR) AS SCOPE_CATALOG, CAST(NULL AS VARCHAR) AS SCOPE_SCHEMA, CAST(NULL AS VARCHAR) AS SCOPE_TABLE, " - + "CAST(SS_DATA_TYPE AS SMALLINT) AS SOURCE_DATA_TYPE, " - + "CASE SS_IS_IDENTITY WHEN 0 THEN 'NO' WHEN 1 THEN 'YES' WHEN '' THEN '' END AS 
IS_AUTOINCREMENT," - + "CASE SS_IS_COMPUTED WHEN 0 THEN 'NO' WHEN 1 THEN 'YES' WHEN '' THEN '' END AS IS_GENERATEDCOLUMN, " - + "SS_IS_SPARSE, SS_IS_COLUMN_SET, SS_UDT_CATALOG_NAME, SS_UDT_SCHEMA_NAME, SS_UDT_ASSEMBLY_TYPE_NAME," - + "SS_XML_SCHEMACOLLECTION_CATALOG_NAME, SS_XML_SCHEMACOLLECTION_SCHEMA_NAME, SS_XML_SCHEMACOLLECTION_NAME " - + "FROM @mssqljdbc_temp_sp_columns_result ORDER BY TABLE_CAT, TABLE_SCHEM, TABLE_NAME, ORDINAL_POSITION;"; - SQLServerResultSet rs = null; - PreparedStatement pstmt = (SQLServerPreparedStatement) this.connection.prepareStatement(spColumnsSql); + + String spColumnsProcName = SP_COLUMNS_170; + + String spColumnsSqlTemplate = "DECLARE @mssqljdbc_temp_sp_columns_result TABLE(TABLE_QUALIFIER SYSNAME, TABLE_OWNER SYSNAME," + + "TABLE_NAME SYSNAME, COLUMN_NAME SYSNAME, DATA_TYPE SMALLINT, TYPE_NAME SYSNAME, PRECISION INT," + + "LENGTH INT, SCALE SMALLINT, RADIX SMALLINT, NULLABLE SMALLINT, REMARKS VARCHAR(254), COLUMN_DEF NVARCHAR(4000)," + + "SQL_DATA_TYPE SMALLINT, SQL_DATETIME_SUB SMALLINT, CHAR_OCTET_LENGTH INT, ORDINAL_POSITION INT," + + "IS_NULLABLE VARCHAR(254), SS_IS_SPARSE SMALLINT, SS_IS_COLUMN_SET SMALLINT, SS_IS_COMPUTED SMALLINT," + + "SS_IS_IDENTITY SMALLINT, SS_UDT_CATALOG_NAME NVARCHAR(128), SS_UDT_SCHEMA_NAME NVARCHAR(128)," + + "SS_UDT_ASSEMBLY_TYPE_NAME NVARCHAR(max), SS_XML_SCHEMACOLLECTION_CATALOG_NAME NVARCHAR(128)," + + "SS_XML_SCHEMACOLLECTION_SCHEMA_NAME NVARCHAR(128), SS_XML_SCHEMACOLLECTION_NAME NVARCHAR(128)," + + "SS_DATA_TYPE TINYINT);" + + "INSERT INTO @mssqljdbc_temp_sp_columns_result EXEC %s ?,?,?,?,?,?;" + + "SELECT TABLE_QUALIFIER AS TABLE_CAT, TABLE_OWNER AS TABLE_SCHEM, TABLE_NAME, COLUMN_NAME, " + + "CAST(DATA_TYPE AS INT) AS DATA_TYPE,TYPE_NAME, PRECISION AS COLUMN_SIZE, LENGTH AS BUFFER_LENGTH, " + + "CAST(SCALE AS INT) AS DECIMAL_DIGITS, CAST(RADIX AS INT) AS NUM_PREC_RADIX,CAST(NULLABLE AS INT) AS NULLABLE, " + + "CAST(REMARKS AS VARCHAR) AS REMARKS, COLUMN_DEF, CAST(SQL_DATA_TYPE AS INT) AS 
SQL_DATA_TYPE, " + + "CAST(SQL_DATETIME_SUB AS INT) AS SQL_DATETIME_SUB, CHAR_OCTET_LENGTH, ORDINAL_POSITION, IS_NULLABLE," + + "CAST(NULL AS VARCHAR) AS SCOPE_CATALOG, CAST(NULL AS VARCHAR) AS SCOPE_SCHEMA, CAST(NULL AS VARCHAR) AS SCOPE_TABLE, " + + "CAST(SS_DATA_TYPE AS SMALLINT) AS SOURCE_DATA_TYPE, " + + "CASE SS_IS_IDENTITY WHEN 0 THEN 'NO' WHEN 1 THEN 'YES' WHEN '' THEN '' END AS IS_AUTOINCREMENT," + + "CASE SS_IS_COMPUTED WHEN 0 THEN 'NO' WHEN 1 THEN 'YES' WHEN '' THEN '' END AS IS_GENERATEDCOLUMN, " + + "SS_IS_SPARSE, SS_IS_COLUMN_SET, SS_UDT_CATALOG_NAME, SS_UDT_SCHEMA_NAME, SS_UDT_ASSEMBLY_TYPE_NAME," + + "SS_XML_SCHEMACOLLECTION_CATALOG_NAME, SS_XML_SCHEMACOLLECTION_SCHEMA_NAME, SS_XML_SCHEMACOLLECTION_NAME " + + "FROM @mssqljdbc_temp_sp_columns_result ORDER BY TABLE_CAT, TABLE_SCHEM, TABLE_NAME, ORDINAL_POSITION;"; + + SQLServerResultSet rs = null; + PreparedStatement pstmt = null; + + try { + pstmt = (SQLServerPreparedStatement) this.connection + .prepareStatement(String.format(spColumnsSqlTemplate, spColumnsProcName)); pstmt.closeOnCompletion(); - try { - pstmt.setString(1, (null != table && !table.isEmpty()) ? escapeIDName(table) : "%"); - pstmt.setString(2, (null != schema && !schema.isEmpty()) ? escapeIDName(schema) : "%"); - pstmt.setString(3, (null != catalog && !catalog.isEmpty()) ? catalog : this.connection.getCatalog()); - pstmt.setString(4, (null != col && !col.isEmpty()) ? 
escapeIDName(col) : "%"); - pstmt.setInt(5, 2);// show sparse columns - pstmt.setInt(6, 3);// odbc version + setColumnsParameters(pstmt, table, schema, catalog, col); + + try { rs = (SQLServerResultSet) pstmt.executeQuery(); - rs.getColumn(5).setFilter(new DataTypeFilter()); - rs.getColumn(7).setFilter(new ZeroFixupFilter()); - rs.getColumn(8).setFilter(new ZeroFixupFilter()); - rs.getColumn(16).setFilter(new ZeroFixupFilter()); + if (loggerExternal.isLoggable(Level.FINER)) { + loggerExternal.finer("Successfully executed " + spColumnsProcName); + } } catch (SQLException e) { - if (null != pstmt) { - try { - pstmt.close(); - } catch (SQLServerException ignore) { - if (loggerExternal.isLoggable(Level.FINER)) { - loggerExternal.finer( - "getColumns() threw an exception when attempting to close PreparedStatement"); - } - } + // If getColumns() fails with sp_columns_170, fall back to sp_columns_100 + + if (loggerExternal.isLoggable(Level.FINER)) { + loggerExternal.finer(spColumnsProcName + " failed, falling back to sp_columns_100: " + e.getMessage()); } - throw e; - } finally { - if (null != originalCatalog) { - connection.setCatalog(originalCatalog); + + // fallback to SP_COLUMNS_100 + pstmt.close(); + spColumnsProcName = SP_COLUMNS_100; + + pstmt = (SQLServerPreparedStatement) this.connection + .prepareStatement(String.format(spColumnsSqlTemplate, spColumnsProcName)); + pstmt.closeOnCompletion(); + + setColumnsParameters(pstmt, table, schema, catalog, col); + + rs = (SQLServerResultSet) pstmt.executeQuery(); + if (loggerExternal.isLoggable(Level.FINER)) { + loggerExternal.finer("Successfully executed " + spColumnsProcName); } } - return rs; - } else { - /** - * Can't actually switchCatalogs on Azure DW. This is here to keep consistency in behavior with SQL Azure DB - * when user provides a different catalog than the one they're currently connected to. Will throw exception - * when it's different and do nothing if it's the same/null. 
- */ - LOCK.lock(); - try { - if (null == getColumnsDWColumns) { - getColumnsDWColumns = new LinkedHashMap<>(); - getColumnsDWColumns.put(1, TABLE_CAT); - getColumnsDWColumns.put(2, TABLE_SCHEM); - getColumnsDWColumns.put(3, TABLE_NAME); - getColumnsDWColumns.put(4, COLUMN_NAME); - getColumnsDWColumns.put(5, DATA_TYPE); - getColumnsDWColumns.put(6, TYPE_NAME); - getColumnsDWColumns.put(7, COLUMN_SIZE); - getColumnsDWColumns.put(8, BUFFER_LENGTH); - getColumnsDWColumns.put(9, DECIMAL_DIGITS); - getColumnsDWColumns.put(10, NUM_PREC_RADIX); - getColumnsDWColumns.put(11, NULLABLE); - getColumnsDWColumns.put(12, REMARKS); - getColumnsDWColumns.put(13, COLUMN_DEF); - getColumnsDWColumns.put(14, SQL_DATA_TYPE); - getColumnsDWColumns.put(15, SQL_DATETIME_SUB); - getColumnsDWColumns.put(16, CHAR_OCTET_LENGTH); - getColumnsDWColumns.put(17, ORDINAL_POSITION); - getColumnsDWColumns.put(18, IS_NULLABLE); - /* - * Use negative value keys to indicate that this column doesn't exist in SQL Server and should just - * be queried as 'NULL' - */ - getColumnsDWColumns.put(-1, SCOPE_CATALOG); - getColumnsDWColumns.put(-2, SCOPE_SCHEMA); - getColumnsDWColumns.put(-3, SCOPE_TABLE); - getColumnsDWColumns.put(29, SOURCE_DATA_TYPE); - getColumnsDWColumns.put(22, IS_AUTOINCREMENT); - getColumnsDWColumns.put(21, IS_GENERATEDCOLUMN); - getColumnsDWColumns.put(19, SS_IS_SPARSE); - getColumnsDWColumns.put(20, SS_IS_COLUMN_SET); - getColumnsDWColumns.put(23, SS_UDT_CATALOG_NAME); - getColumnsDWColumns.put(24, SS_UDT_SCHEMA_NAME); - getColumnsDWColumns.put(25, SS_UDT_ASSEMBLY_TYPE_NAME); - getColumnsDWColumns.put(26, SS_XML_SCHEMACOLLECTION_CATALOG_NAME); - getColumnsDWColumns.put(27, SS_XML_SCHEMACOLLECTION_SCHEMA_NAME); - getColumnsDWColumns.put(28, SS_XML_SCHEMACOLLECTION_NAME); - } - if (null == getTypesDWColumns) { - getTypesDWColumns = new LinkedHashMap<>(); - getTypesDWColumns.put(1, NVARCHAR); // TABLE_CAT - getTypesDWColumns.put(2, NVARCHAR); // TABLE_SCHEM - getTypesDWColumns.put(3, 
NVARCHAR); // TABLE_NAME - getTypesDWColumns.put(4, NVARCHAR); // COLUMN_NAME - getTypesDWColumns.put(5, INTEGER); // DATA_TYPE - getTypesDWColumns.put(6, NVARCHAR); // TYPE_NAME - getTypesDWColumns.put(7, INTEGER); // COLUMN_SIZE - getTypesDWColumns.put(8, INTEGER); // BUFFER_LENGTH - getTypesDWColumns.put(9, INTEGER); // DECIMAL_DIGITS - getTypesDWColumns.put(10, INTEGER); // NUM_PREC_RADIX - getTypesDWColumns.put(11, INTEGER); // NULLABLE - getTypesDWColumns.put(12, VARCHAR); // REMARKS - getTypesDWColumns.put(13, NVARCHAR); // COLUMN_DEF - getTypesDWColumns.put(14, INTEGER); // SQL_DATA_TYPE - getTypesDWColumns.put(15, INTEGER); // SQL_DATETIME_SUB - getTypesDWColumns.put(16, INTEGER); // CHAR_OCTET_LENGTH - getTypesDWColumns.put(17, INTEGER); // ORDINAL_POSITION - getTypesDWColumns.put(18, VARCHAR); // IS_NULLABLE - /* - * Use negative value keys to indicate that this column doesn't exist in SQL Server and should just - * be queried as 'NULL' - */ - getTypesDWColumns.put(-1, VARCHAR); // SCOPE_CATALOG - getTypesDWColumns.put(-2, VARCHAR); // SCOPE_SCHEMA - getTypesDWColumns.put(-3, VARCHAR); // SCOPE_TABLE - getTypesDWColumns.put(29, SMALLINT); // SOURCE_DATA_TYPE - getTypesDWColumns.put(22, VARCHAR); // IS_AUTOINCREMENT - getTypesDWColumns.put(21, VARCHAR); // IS_GENERATEDCOLUMN - getTypesDWColumns.put(19, SMALLINT); // SS_IS_SPARSE - getTypesDWColumns.put(20, SMALLINT); // SS_IS_COLUMN_SET - getTypesDWColumns.put(23, NVARCHAR); // SS_UDT_CATALOG_NAME - getTypesDWColumns.put(24, NVARCHAR); // SS_UDT_SCHEMA_NAME - getTypesDWColumns.put(25, NVARCHAR); // SS_UDT_ASSEMBLY_TYPE_NAME - getTypesDWColumns.put(26, NVARCHAR); // SS_XML_SCHEMACOLLECTION_CATALOG_NAME - getTypesDWColumns.put(27, NVARCHAR); // SS_XML_SCHEMACOLLECTION_SCHEMA_NAME - getTypesDWColumns.put(28, NVARCHAR); // SS_XML_SCHEMACOLLECTION_NAME + // Set filters on relevant columns + applyColumnsFilters(rs); + + } catch (SQLException e) { + if (null != pstmt) { + try { + pstmt.close(); + } catch 
(SQLServerException ignore) { + if (loggerExternal.isLoggable(Level.FINER)) { + loggerExternal.finer( + "getColumns() threw an exception when attempting to close PreparedStatement"); + } } + } + throw e; + } finally { + if (originalCatalog != null) { + connection.setCatalog(originalCatalog); + } + } + return rs; + } + + /** + * Helper method to get columns for Azure DW. + * Tries sp_columns_170 first, falls back to sp_columns_100 if needed. + */ + private java.sql.ResultSet getColumnsAzureDW(String catalog, String schema, String table, String col) + throws SQLException { + + /** + * Can't actually switchCatalogs on Azure DW. This is here to keep consistency in behavior with SQL Azure DB + * when user provides a different catalog than the one they're currently connected to. Will throw exception + * when it's different and do nothing if it's the same/null. + */ + LOCK.lock(); + try { + if (null == getColumnsDWColumns) { + getColumnsDWColumns = new LinkedHashMap<>(); + getColumnsDWColumns.put(1, TABLE_CAT); + getColumnsDWColumns.put(2, TABLE_SCHEM); + getColumnsDWColumns.put(3, TABLE_NAME); + getColumnsDWColumns.put(4, COLUMN_NAME); + getColumnsDWColumns.put(5, DATA_TYPE); + getColumnsDWColumns.put(6, TYPE_NAME); + getColumnsDWColumns.put(7, COLUMN_SIZE); + getColumnsDWColumns.put(8, BUFFER_LENGTH); + getColumnsDWColumns.put(9, DECIMAL_DIGITS); + getColumnsDWColumns.put(10, NUM_PREC_RADIX); + getColumnsDWColumns.put(11, NULLABLE); + getColumnsDWColumns.put(12, REMARKS); + getColumnsDWColumns.put(13, COLUMN_DEF); + getColumnsDWColumns.put(14, SQL_DATA_TYPE); + getColumnsDWColumns.put(15, SQL_DATETIME_SUB); + getColumnsDWColumns.put(16, CHAR_OCTET_LENGTH); + getColumnsDWColumns.put(17, ORDINAL_POSITION); + getColumnsDWColumns.put(18, IS_NULLABLE); + /* + * Use negative value keys to indicate that this column doesn't exist in SQL Server + * and should just be queried as 'NULL' + */ + getColumnsDWColumns.put(-1, SCOPE_CATALOG); + getColumnsDWColumns.put(-2, SCOPE_SCHEMA); 
+ getColumnsDWColumns.put(-3, SCOPE_TABLE); + getColumnsDWColumns.put(29, SOURCE_DATA_TYPE); + getColumnsDWColumns.put(22, IS_AUTOINCREMENT); + getColumnsDWColumns.put(21, IS_GENERATEDCOLUMN); + getColumnsDWColumns.put(19, SS_IS_SPARSE); + getColumnsDWColumns.put(20, SS_IS_COLUMN_SET); + getColumnsDWColumns.put(23, SS_UDT_CATALOG_NAME); + getColumnsDWColumns.put(24, SS_UDT_SCHEMA_NAME); + getColumnsDWColumns.put(25, SS_UDT_ASSEMBLY_TYPE_NAME); + getColumnsDWColumns.put(26, SS_XML_SCHEMACOLLECTION_CATALOG_NAME); + getColumnsDWColumns.put(27, SS_XML_SCHEMACOLLECTION_SCHEMA_NAME); + getColumnsDWColumns.put(28, SS_XML_SCHEMACOLLECTION_NAME); + } + if (null == getTypesDWColumns) { + getTypesDWColumns = new LinkedHashMap<>(); + getTypesDWColumns.put(1, NVARCHAR); // TABLE_CAT + getTypesDWColumns.put(2, NVARCHAR); // TABLE_SCHEM + getTypesDWColumns.put(3, NVARCHAR); // TABLE_NAME + getTypesDWColumns.put(4, NVARCHAR); // COLUMN_NAME + getTypesDWColumns.put(5, INTEGER); // DATA_TYPE + getTypesDWColumns.put(6, NVARCHAR); // TYPE_NAME + getTypesDWColumns.put(7, INTEGER); // COLUMN_SIZE + getTypesDWColumns.put(8, INTEGER); // BUFFER_LENGTH + getTypesDWColumns.put(9, INTEGER); // DECIMAL_DIGITS + getTypesDWColumns.put(10, INTEGER); // NUM_PREC_RADIX + getTypesDWColumns.put(11, INTEGER); // NULLABLE + getTypesDWColumns.put(12, VARCHAR); // REMARKS + getTypesDWColumns.put(13, NVARCHAR); // COLUMN_DEF + getTypesDWColumns.put(14, INTEGER); // SQL_DATA_TYPE + getTypesDWColumns.put(15, INTEGER); // SQL_DATETIME_SUB + getTypesDWColumns.put(16, INTEGER); // CHAR_OCTET_LENGTH + getTypesDWColumns.put(17, INTEGER); // ORDINAL_POSITION + getTypesDWColumns.put(18, VARCHAR); // IS_NULLABLE + /* + * Use negative value keys to indicate that this column doesn't exist in SQL Server + * and should just be queried as 'NULL' + */ + getTypesDWColumns.put(-1, VARCHAR); // SCOPE_CATALOG + getTypesDWColumns.put(-2, VARCHAR); // SCOPE_SCHEMA + getTypesDWColumns.put(-3, VARCHAR); // SCOPE_TABLE + 
getTypesDWColumns.put(29, SMALLINT); // SOURCE_DATA_TYPE + getTypesDWColumns.put(22, VARCHAR); // IS_AUTOINCREMENT + getTypesDWColumns.put(21, VARCHAR); // IS_GENERATEDCOLUMN + getTypesDWColumns.put(19, SMALLINT); // SS_IS_SPARSE + getTypesDWColumns.put(20, SMALLINT); // SS_IS_COLUMN_SET + getTypesDWColumns.put(23, NVARCHAR); // SS_UDT_CATALOG_NAME + getTypesDWColumns.put(24, NVARCHAR); // SS_UDT_SCHEMA_NAME + getTypesDWColumns.put(25, NVARCHAR); // SS_UDT_ASSEMBLY_TYPE_NAME + getTypesDWColumns.put(26, NVARCHAR); // SS_XML_SCHEMACOLLECTION_CATALOG_NAME + getTypesDWColumns.put(27, NVARCHAR); // SS_XML_SCHEMACOLLECTION_SCHEMA_NAME + getTypesDWColumns.put(28, NVARCHAR); // SS_XML_SCHEMACOLLECTION_NAME + } + + // Ensure there is a data type for every metadata column + if (getColumnsDWColumns.size() != getTypesDWColumns.size()) { + MessageFormat form = new MessageFormat( + SQLServerException.getErrString("R_colCountNotMatchColTypeCount")); + Object[] msgArgs = { getColumnsDWColumns.size(), getTypesDWColumns.size() }; + throw new IllegalArgumentException(form.format(msgArgs)); + } + } finally { + LOCK.unlock(); + } - // Ensure there is a data type for every metadata column - if (getColumnsDWColumns.size() != getTypesDWColumns.size()) { - MessageFormat form = new MessageFormat( - SQLServerException.getErrString("R_colCountNotMatchColTypeCount")); - Object[] msgArgs = {getColumnsDWColumns.size(), getTypesDWColumns.size()}; - throw new IllegalArgumentException(form.format(msgArgs)); + String spColumnsProcName = SP_COLUMNS_170; + + try (PreparedStatement storedProcPstmt = this.connection + .prepareStatement("EXEC " + spColumnsProcName + " ?,?,?,?,?,?;")) { + + setColumnsParameters(storedProcPstmt, table, schema, catalog, col); + + try (ResultSet rs = storedProcPstmt.executeQuery()) { + if (loggerExternal.isLoggable(Level.FINER)) { + loggerExternal.finer("Successfully executed " + spColumnsProcName); } - } finally { - LOCK.unlock(); + return buildAzureDWResultSet(rs); + } + } 
catch (SQLException primaryEx) { + + // If sp_columns_170 fails on Azure DW, fallback to sp_columns_100 + if (loggerExternal.isLoggable(Level.FINER)) { + loggerExternal.finer(spColumnsProcName + " failed on Azure DW, falling back to sp_columns_100: " + + primaryEx.getMessage()); } + spColumnsProcName = SP_COLUMNS_100; try (PreparedStatement storedProcPstmt = this.connection - .prepareStatement("EXEC sp_columns_100 ?,?,?,?,?,?;")) { - storedProcPstmt.setString(1, (null != table && !table.isEmpty()) ? escapeIDName(table) : "%"); - storedProcPstmt.setString(2, (null != schema && !schema.isEmpty()) ? escapeIDName(schema) : "%"); - storedProcPstmt.setString(3, - (null != catalog && !catalog.isEmpty()) ? catalog : this.connection.getCatalog()); - storedProcPstmt.setString(4, (null != col && !col.isEmpty()) ? escapeIDName(col) : "%"); - storedProcPstmt.setInt(5, 2);// show sparse columns - storedProcPstmt.setInt(6, 3);// odbc version - - SQLServerResultSet userRs = null; - PreparedStatement resultPstmt = null; + .prepareStatement("EXEC " + spColumnsProcName + " ?,?,?,?,?,?;")) { + + setColumnsParameters(storedProcPstmt, table, schema, catalog, col); + try (ResultSet rs = storedProcPstmt.executeQuery()) { - StringBuilder azureDwSelectBuilder = new StringBuilder(); - boolean isFirstRow = true; // less expensive than continuously checking isFirst() - while (rs.next()) { - if (!isFirstRow) { - azureDwSelectBuilder.append(" UNION ALL "); - } - azureDwSelectBuilder.append(generateAzureDWSelect(rs, getColumnsDWColumns, getTypesDWColumns)); - isFirstRow = false; + if (loggerExternal.isLoggable(Level.FINER)) { + loggerExternal.finer("Successfully executed " + spColumnsProcName); } + return buildAzureDWResultSet(rs); + } + } + } + } - if (0 == azureDwSelectBuilder.length()) { - azureDwSelectBuilder.append(generateAzureDWEmptyRS(getColumnsDWColumns)); - } else { - azureDwSelectBuilder.append(" ORDER BY TABLE_CAT, TABLE_SCHEM, TABLE_NAME, ORDINAL_POSITION "); - } + /** + * Helper 
method to set parameters for columns stored procedure calls. + * @param pstmt the prepared statement + * @param table the table name + * @param schema the schema name + * @param catalog the catalog name + * @param col the column name + * @throws SQLException + */ + private void setColumnsParameters(PreparedStatement pstmt, String table, String schema, String catalog, String col) + throws SQLException { + pstmt.setString(1, (null != table && !table.isEmpty()) ? escapeIDName(table) : "%"); + pstmt.setString(2, (null != schema && !schema.isEmpty()) ? escapeIDName(schema) : "%"); + pstmt.setString(3, (null != catalog && !catalog.isEmpty()) ? catalog : this.connection.getCatalog()); + pstmt.setString(4, (null != col && !col.isEmpty()) ? escapeIDName(col) : "%"); + pstmt.setInt(5, 2);// show sparse columns + pstmt.setInt(6, 3);// odbc version + } - resultPstmt = (SQLServerPreparedStatement) this.connection - .prepareStatement(azureDwSelectBuilder.toString()); - userRs = (SQLServerResultSet) resultPstmt.executeQuery(); - resultPstmt.closeOnCompletion(); - userRs.getColumn(5).setFilter(new DataTypeFilter()); - userRs.getColumn(7).setFilter(new ZeroFixupFilter()); - userRs.getColumn(8).setFilter(new ZeroFixupFilter()); - userRs.getColumn(16).setFilter(new ZeroFixupFilter()); - } catch (SQLException e) { - if (null != resultPstmt) { - try { - resultPstmt.close(); - } catch (SQLServerException ignore) { - if (loggerExternal.isLoggable(Level.FINER)) { - loggerExternal.finer( - "getColumns() threw an exception when attempting to close PreparedStatement"); - } - } + /** + * Helper method to apply filters to the result set columns. 
+ * @param rs the result set + * @throws SQLException + */ + private void applyColumnsFilters(SQLServerResultSet rs) throws SQLException { + rs.getColumn(5).setFilter(new DataTypeFilter()); + rs.getColumn(7).setFilter(new ZeroFixupFilter()); + rs.getColumn(8).setFilter(new ZeroFixupFilter()); + rs.getColumn(16).setFilter(new ZeroFixupFilter()); + } + + /** + * Helper method to build the Azure DW result set from stored procedure results. + * @param rs the result set from the stored procedure + * @return the final result set + * @throws SQLException + */ + private SQLServerResultSet buildAzureDWResultSet(ResultSet rs) throws SQLException { + StringBuilder azureDwSelectBuilder = new StringBuilder(); + boolean isFirstRow = true; // less expensive than continuously checking isFirst() + + while (rs.next()) { + if (!isFirstRow) { + azureDwSelectBuilder.append(" UNION ALL "); + } + azureDwSelectBuilder.append(generateAzureDWSelect(rs, getColumnsDWColumns, getTypesDWColumns)); + isFirstRow = false; + } + + if (0 == azureDwSelectBuilder.length()) { + azureDwSelectBuilder.append(generateAzureDWEmptyRS(getColumnsDWColumns)); + } else { + azureDwSelectBuilder.append(" ORDER BY TABLE_CAT, TABLE_SCHEM, TABLE_NAME, ORDINAL_POSITION "); + } + + PreparedStatement resultPstmt = null; + try { + resultPstmt = (SQLServerPreparedStatement) this.connection + .prepareStatement(azureDwSelectBuilder.toString()); + SQLServerResultSet userRs = (SQLServerResultSet) resultPstmt.executeQuery(); + resultPstmt.closeOnCompletion(); + + applyColumnsFilters(userRs); + return userRs; + } catch (SQLException e) { + if (null != resultPstmt) { + try { + resultPstmt.close(); + } catch (SQLServerException ignore) { + if (loggerExternal.isLoggable(Level.FINER)) { + loggerExternal.finer( + "getColumns() threw an exception when attempting to close PreparedStatement"); } - throw e; } - return userRs; } + throw e; } } diff --git a/src/main/java/microsoft/sql/Types.java b/src/main/java/microsoft/sql/Types.java 
index 082b4de49a..8d9faf29d6 100644 --- a/src/main/java/microsoft/sql/Types.java +++ b/src/main/java/microsoft/sql/Types.java @@ -6,8 +6,21 @@ package microsoft.sql; /** - * Defines the constants that are used to identify the SQL types that are specific to Microsoft SQL Server. - * + * Defines the constants that are used to identify SQL types specific to Microsoft SQL Server. + * + * These type codes are used internally by the JDBC driver for type identification and do not + * correspond to the native SQL Server data type values. The driver uses these constants when + * reporting column types through ParameterMetaData and ResultSetMetaData interfaces. + * + * To retrieve the actual SQL Server native data type codes, use DatabaseMetaData.getColumns() + * and read the SQL_DATA_TYPE column from the result set. For example: + * + * ResultSet rs = metaData.getColumns(null, null, tableName, "%"); + * int sqlDataType = rs.getInt("SQL_DATA_TYPE"); + * + * The SQL_DATA_TYPE column contains the native SQL Server type codes as returned by the + * sp_columns stored procedure (sp_columns_170 on SQL Server 2025+, sp_columns_100 on older versions). + * * This class is never instantiated. 
*/ public final class Types { diff --git a/src/test/java/com/microsoft/sqlserver/jdbc/databasemetadata/DatabaseMetaDataTest.java b/src/test/java/com/microsoft/sqlserver/jdbc/databasemetadata/DatabaseMetaDataTest.java index a4d98475c8..4076fa4864 100644 --- a/src/test/java/com/microsoft/sqlserver/jdbc/databasemetadata/DatabaseMetaDataTest.java +++ b/src/test/java/com/microsoft/sqlserver/jdbc/databasemetadata/DatabaseMetaDataTest.java @@ -32,6 +32,7 @@ import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashMap; +import java.util.LinkedHashSet; import java.util.Map; import java.util.Set; import java.util.UUID; @@ -1386,6 +1387,146 @@ public void testJSONMetaData() throws SQLException { } } + /** + * Validates that all SQL Server data type columns are present in {@code DatabaseMetaData.getColumns()}. + * + * This test creates a table with various SQL Server-specific data types and verifies that + * all columns are returned by the {@code getColumns()} method. + * + * Note: The {@code SQL_DATA_TYPE} column contains the native SQL Server type code, which + * differs from the JDBC {@code DATA_TYPE} values. The JDBC driver maps these to + * {@code microsoft.sql.Types} constants for driver-specific types. 
+ * + * @throws SQLException if a database access error occurs + */ + @Test + public void testGetColumnsDataTypesMapping() throws SQLException { + String dataTypesTableName = RandomUtil.getIdentifier("DataTypesMappingTable"); + try (Connection conn = getConnection(); + Statement stmt = conn.createStatement()) { + + String createTableSQL = "CREATE TABLE " + AbstractSQLGenerator.escapeIdentifier(dataTypesTableName) + " (" + + "col_datetimeoffset DATETIMEOFFSET(7), " + + "col_datetime DATETIME, " + + "col_smalldatetime SMALLDATETIME, " + + "col_money MONEY, " + + "col_smallmoney SMALLMONEY, " + + "col_guid UNIQUEIDENTIFIER, " + + "col_sql_variant SQL_VARIANT, " + + "col_geometry GEOMETRY, " + + "col_geography GEOGRAPHY " + + ")"; + + stmt.execute(createTableSQL); + + DatabaseMetaData databaseMetaData = conn.getMetaData(); + assertNotNull(databaseMetaData, "DatabaseMetaData should not be null"); + + // Expected column names from the CREATE TABLE statement + Set expectedColumns = new LinkedHashSet<>(); + expectedColumns.add("col_datetimeoffset"); + expectedColumns.add("col_datetime"); + expectedColumns.add("col_smalldatetime"); + expectedColumns.add("col_money"); + expectedColumns.add("col_smallmoney"); + expectedColumns.add("col_guid"); + expectedColumns.add("col_sql_variant"); + expectedColumns.add("col_geometry"); + expectedColumns.add("col_geography"); + + Set foundColumns = new HashSet<>(); + + try (ResultSet resultSet = databaseMetaData.getColumns(null, null, + dataTypesTableName, "%")) { + + while (resultSet.next()) { + String columnName = resultSet.getString("COLUMN_NAME"); + String typeName = resultSet.getString("TYPE_NAME"); + + // SQL_DATA_TYPE contains native SQL Server type codes which differ from JDBC DATA_TYPE values. + // The JDBC driver maps these to microsoft.sql.Types constants for driver-specific types. 
+ // int sqlDataType = resultSet.getInt("SQL_DATA_TYPE"); + // System.out.println("Column: " + columnName + ", TYPE_NAME: " + typeName + ", SQL_DATA_TYPE: " + sqlDataType); + + foundColumns.add(columnName); + } + } + + // Verify that all expected columns are present + assertEquals(expectedColumns.size(), foundColumns.size(), + "Number of columns found does not match expected"); + + for (String expectedColumn : expectedColumns) { + assertTrue(foundColumns.contains(expectedColumn), + "Column '" + expectedColumn + "' should be present in getColumns() result"); + } + } finally { + try (Connection conn = getConnection(); Statement stmt = conn.createStatement()) { + TestUtils.dropTableIfExists(dataTypesTableName, stmt); + } + } + } + + /** + * Validates that JSON and VECTOR columns are present in {@code DatabaseMetaData.getColumns()}. + * + * The {@code getColumns()} method internally calls the {@code sp_columns} system stored procedure + * to retrieve column metadata. JSON and VECTOR types are supported via {@code sp_columns_170}, + * which is available in SQL Server 2025 and later. In earlier versions or unsupported environments, + * the driver falls back to {@code sp_columns_100}, which returns {@code null} for these types. + * + * Note: The {@code DATA_TYPE} value returned by the JDBC driver may differ from the SQL Server + * native type code. To retrieve the actual SQL Server data type, access the {@code SQL_DATA_TYPE} + * column from the result set. 
+ * + * @throws SQLException if a database access error occurs + */ + @Test + @vectorJsonTest + @Tag(Constants.xAzureSQLDB) + public void testGetColumnsMappingJsonAndVector() throws SQLException { + String dataTypesTableName = RandomUtil.getIdentifier("JsonVectorMappingTable"); + try (Connection conn = getConnection(); + Statement stmt = conn.createStatement()) { + + String createTableSQL = "CREATE TABLE " + AbstractSQLGenerator.escapeIdentifier(dataTypesTableName) + " (" + + "col_json JSON, " + + "col_vector VECTOR(3) " + + ")"; + + stmt.execute(createTableSQL); + + DatabaseMetaData databaseMetaData = conn.getMetaData(); + assertNotNull(databaseMetaData, "DatabaseMetaData should not be null"); + + boolean jsonColumnFound = false; + boolean vectorColumnFound = false; + + try (ResultSet resultSet = databaseMetaData.getColumns(null, null, + dataTypesTableName, "%")) { + + while (resultSet.next()) { + String columnName = resultSet.getString("COLUMN_NAME"); + + if ("col_json".equals(columnName)) { + jsonColumnFound = true; + } else if ("col_vector".equals(columnName)) { + vectorColumnFound = true; + } + } + } + + // Verify that both columns are present in the result set + assertTrue(jsonColumnFound, "JSON column 'col_json' should be present in getColumns() result"); + assertTrue(vectorColumnFound, "VECTOR column 'col_vector' should be present in getColumns() result"); + + } finally { + try (Connection conn = getConnection(); Statement stmt = conn.createStatement()) { + TestUtils.dropTableIfExists(dataTypesTableName, stmt); + } + } + } + @BeforeAll public static void setupTable() throws Exception { setConnection(); diff --git a/src/test/java/com/microsoft/sqlserver/jdbc/datatypes/VectorTest.java b/src/test/java/com/microsoft/sqlserver/jdbc/datatypes/VectorTest.java index 4f4e3913b7..06b0009896 100644 --- a/src/test/java/com/microsoft/sqlserver/jdbc/datatypes/VectorTest.java +++ b/src/test/java/com/microsoft/sqlserver/jdbc/datatypes/VectorTest.java @@ -13,6 +13,7 @@ 
import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.fail; +import java.lang.reflect.Field; import java.sql.DatabaseMetaData; import java.sql.DriverManager; import java.sql.ParameterMetaData; @@ -97,24 +98,88 @@ private static void cleanupTest() throws SQLException { } } + /** + * Test to verify that the vector column is present in DatabaseMetaData.getColumns(). + * + * Note: The DATA_TYPE value returned by the JDBC driver may differ from the SQL Server + * native type code. To retrieve the actual SQL Server data type, access the + * {@code SQL_DATA_TYPE} column from the result set. + */ @Test + @Tag(Constants.xAzureSQLDB) public void testConnectionGetMetaData() throws Exception { DatabaseMetaData metaData = connection.getMetaData(); assertNotNull(metaData, "DatabaseMetaData should not be null"); - try (ResultSet rs = metaData.getColumns(null, null, tableName, "v")) { - - ResultSetMetaData rsMetaData = rs.getMetaData(); - int columnCount = rsMetaData.getColumnCount(); + try (ResultSet rs = metaData.getColumns(null, null, tableName, "%")) { + boolean vectorColumnFound = false; while (rs.next()) { - for (int i = 1; i <= columnCount; i++) { - System.out.println(rsMetaData.getColumnName(i) + ": " + rs.getString(i)); + for (int i = 1; i <= rs.getMetaData().getColumnCount(); i++) { + String columnName = rs.getMetaData().getColumnName(i); + String columnValue = rs.getString(i); + // Uncomment the line below to see all column names and values + // System.out.println(columnName + " : " + columnValue); + } + // The vector data type column should appear below, but sp_columns_170 + // is not yet available in Azure SQL DB instances, which return null for these types. + // For now, the assertion below passes only against SQL Server 2025 instances.
+ + if ("v".equalsIgnoreCase(rs.getString("COLUMN_NAME"))) { + vectorColumnFound = true; } - System.out.println(); } + // Verify that the vector column "v" is present in the result set + // when running against SQL Server 2025 or later + assertTrue(vectorColumnFound, "Vector column 'v' found in metadata"); + } + } + + /** + * Test to verify that the vector column is present in DatabaseMetaData.getColumns() for Azure DW. + * Added this to increase code coverage for Azure DW code path in getColumns method. + */ + @Test + @Tag(Constants.xAzureSQLDB) + public void testConnectionGetMetaDataAzureDW() throws Exception { + try (SQLServerConnection conn = getConnection()) { + + // Use reflection to simulate Azure DW connection + Field f1 = SQLServerConnection.class.getDeclaredField("isAzureDW"); + f1.setAccessible(true); + f1.set(conn, true); + + // Set isAzure to true as well since some code paths check both + Field f2 = SQLServerConnection.class.getDeclaredField("isAzure"); + f2.setAccessible(true); + f2.set(conn, true); + + DatabaseMetaData metaData = conn.getMetaData(); + assertNotNull(metaData, "DatabaseMetaData should not be null"); - //assertEquals(microsoft.sql.Types.VECTOR, rsMetaData.getColumnType(columnCount)); + try (ResultSet rs = metaData.getColumns(null, null, tableName, "%")) { + + boolean vectorColumnFound = false; + while (rs.next()) { + for (int i = 1; i <= rs.getMetaData().getColumnCount(); i++) { + String columnName = rs.getMetaData().getColumnName(i); + String columnValue = rs.getString(i); + // Uncomment the line below to see all column names and values + // System.out.println(columnName + " : " + columnValue); + } + // The vector data type column should appear below, but sp_columns_170 + // is not yet available in Azure SQL DB instances, which return null for these types. + // For now, the assertion below passes only against SQL Server 2025 instances.
+ + if ("v".equalsIgnoreCase(rs.getString("COLUMN_NAME"))) { + vectorColumnFound = true; + } + } + + // Verify that the vector column "v" is present in the result set + // when running against SQL Server 2025 or later + assertTrue(vectorColumnFound, "Vector column 'v' found in metadata"); + } } } @@ -1685,3 +1750,4 @@ private SQLServerConnection getConnectionWithVectorFlag(String vectorTypeSupport } + diff --git a/src/test/java/com/microsoft/sqlserver/jdbc/parametermetadata/ParameterMetaDataTest.java b/src/test/java/com/microsoft/sqlserver/jdbc/parametermetadata/ParameterMetaDataTest.java index 6edafad465..2eb1a5b59e 100644 --- a/src/test/java/com/microsoft/sqlserver/jdbc/parametermetadata/ParameterMetaDataTest.java +++ b/src/test/java/com/microsoft/sqlserver/jdbc/parametermetadata/ParameterMetaDataTest.java @@ -13,6 +13,7 @@ import java.sql.Connection; import java.sql.ParameterMetaData; import java.sql.PreparedStatement; +import java.sql.ResultSetMetaData; import java.sql.SQLException; import java.sql.Statement; @@ -23,10 +24,13 @@ import org.junit.runner.RunWith; import com.microsoft.sqlserver.jdbc.RandomUtil; +import com.microsoft.sqlserver.jdbc.SQLServerPreparedStatement; +import com.microsoft.sqlserver.jdbc.SQLServerResultSet; import com.microsoft.sqlserver.jdbc.TestUtils; import com.microsoft.sqlserver.testframework.AbstractSQLGenerator; import com.microsoft.sqlserver.testframework.AbstractTest; import com.microsoft.sqlserver.testframework.Constants; +import com.microsoft.sqlserver.testframework.vectorJsonTest; @RunWith(JUnitPlatform.class) @@ -264,4 +268,181 @@ public void testParseQueryMetaWithTVP() throws SQLException { } } } + + /** + * Tests ParameterMetaData and ResultSetMetaData for SQL Server-specific data types. + * + * This test creates a table with various SQL Server data types including DATETIMEOFFSET, DATETIME, + * SMALLDATETIME, MONEY, SMALLMONEY, UNIQUEIDENTIFIER, SQL_VARIANT, GEOMETRY, GEOGRAPHY, VECTOR, and JSON. 
+ * + * It verifies that the JDBC driver correctly reports the SQL type code and type name for each column + * through both ParameterMetaData (for INSERT statement parameters) and ResultSetMetaData (for SELECT + * query results). + * + * Parameter type information is retrieved from the TDS stream, where SQL Server-specific + * types (SSType) are mapped to microsoft.sql.Types constants in DataType.java. + * + * @throws SQLException if a database access error occurs + */ + @Test + @vectorJsonTest + @Tag(Constants.xAzureSQLDW) + public void testCheckMetaData() throws SQLException { + + String dataTypesTableName = RandomUtil.getIdentifier("DataTypesMappingTable"); + + String createTableSQL = "CREATE TABLE " + AbstractSQLGenerator.escapeIdentifier(dataTypesTableName) + " (" + + "col_datetimeoffset DATETIMEOFFSET(7), " + + "col_datetime DATETIME, " + + "col_smalldatetime SMALLDATETIME, " + + "col_money MONEY, " + + "col_smallmoney SMALLMONEY, " + + "col_guid UNIQUEIDENTIFIER, " + + "col_sql_variant SQL_VARIANT, " + + "col_geometry GEOMETRY, " + + "col_geography GEOGRAPHY, " + + "col_vector VECTOR(3), " + + "col_json JSON" + + ")"; + + try (Connection con = getConnection(); Statement stmt = con.createStatement()) { + stmt.execute(createTableSQL); + + try { + String insertSQL = "INSERT INTO " + AbstractSQLGenerator.escapeIdentifier(dataTypesTableName) + + " (col_datetimeoffset, col_datetime, col_smalldatetime, col_money, col_smallmoney, " + + "col_guid, col_sql_variant, col_geometry, col_geography, col_vector, col_json) " + + "VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"; + + try (SQLServerPreparedStatement pstmt = (SQLServerPreparedStatement) con.prepareStatement(insertSQL)) { + ParameterMetaData paramMetaData = pstmt.getParameterMetaData(); + + // Verify parameter count + assertEquals(11, paramMetaData.getParameterCount()); + + // Column 1: DATETIMEOFFSET(7) + int sqlType1 = paramMetaData.getParameterType(1); + String sqlTypeName1 = paramMetaData.getParameterTypeName(1); + 
assertEquals(microsoft.sql.Types.DATETIMEOFFSET, sqlType1); + assertEquals("datetimeoffset", sqlTypeName1); + + // Column 2: DATETIME + int sqlType2 = paramMetaData.getParameterType(2); + String sqlTypeName2 = paramMetaData.getParameterTypeName(2); + assertEquals(java.sql.Types.TIMESTAMP, sqlType2); + assertEquals("datetime", sqlTypeName2); + + // Column 3: SMALLDATETIME + int sqlType3 = paramMetaData.getParameterType(3); + String sqlTypeName3 = paramMetaData.getParameterTypeName(3); + assertEquals(java.sql.Types.TIMESTAMP, sqlType3); + assertEquals("smalldatetime", sqlTypeName3); + + // Column 4: MONEY + int sqlType4 = paramMetaData.getParameterType(4); + String sqlTypeName4 = paramMetaData.getParameterTypeName(4); + assertEquals(java.sql.Types.DECIMAL, sqlType4); + assertEquals("money", sqlTypeName4); + + // Column 5: SMALLMONEY + int sqlType5 = paramMetaData.getParameterType(5); + String sqlTypeName5 = paramMetaData.getParameterTypeName(5); + assertEquals(java.sql.Types.DECIMAL, sqlType5); + assertEquals("smallmoney", sqlTypeName5); + + // Column 6: UNIQUEIDENTIFIER + int sqlType6 = paramMetaData.getParameterType(6); + String sqlTypeName6 = paramMetaData.getParameterTypeName(6); + assertEquals(java.sql.Types.CHAR, sqlType6); + assertEquals("uniqueidentifier", sqlTypeName6); + + // Column 7: SQL_VARIANT + int sqlType7 = paramMetaData.getParameterType(7); + String sqlTypeName7 = paramMetaData.getParameterTypeName(7); + assertEquals(microsoft.sql.Types.SQL_VARIANT, sqlType7); + assertEquals("sql_variant", sqlTypeName7); + + // Column 8: GEOMETRY + int sqlType8 = paramMetaData.getParameterType(8); + String sqlTypeName8 = paramMetaData.getParameterTypeName(8); + assertEquals(microsoft.sql.Types.GEOMETRY, sqlType8); + assertEquals("geometry", sqlTypeName8); + + // Column 9: GEOGRAPHY + int sqlType9 = paramMetaData.getParameterType(9); + String sqlTypeName9 = paramMetaData.getParameterTypeName(9); + assertEquals(microsoft.sql.Types.GEOGRAPHY, sqlType9); + 
assertEquals("geography", sqlTypeName9); + + // Column 10: VECTOR(3) + int sqlType10 = paramMetaData.getParameterType(10); + String sqlTypeName10 = paramMetaData.getParameterTypeName(10); + assertEquals(microsoft.sql.Types.VECTOR, sqlType10); + assertEquals("vector", sqlTypeName10); + + // Column 11: JSON + int sqlType11 = paramMetaData.getParameterType(11); + String sqlTypeName11 = paramMetaData.getParameterTypeName(11); + assertEquals(microsoft.sql.Types.JSON, sqlType11); + assertEquals("json", sqlTypeName11); + + } + + // Also verify ResultSetMetaData returns consistent types + try (SQLServerResultSet rs = (SQLServerResultSet) stmt + .executeQuery("SELECT * FROM " + AbstractSQLGenerator.escapeIdentifier(dataTypesTableName))) { + ResultSetMetaData rsmd = rs.getMetaData(); + + assertEquals(11, rsmd.getColumnCount()); + + // Column 1: DATETIMEOFFSET(7) + assertEquals(microsoft.sql.Types.DATETIMEOFFSET, rsmd.getColumnType(1)); + assertEquals("datetimeoffset", rsmd.getColumnTypeName(1)); + + // Column 2: DATETIME + assertEquals(java.sql.Types.TIMESTAMP, rsmd.getColumnType(2)); + assertEquals("datetime", rsmd.getColumnTypeName(2)); + + // Column 3: SMALLDATETIME + assertEquals(java.sql.Types.TIMESTAMP, rsmd.getColumnType(3)); + assertEquals("smalldatetime", rsmd.getColumnTypeName(3)); + + // Column 4: MONEY + assertEquals(java.sql.Types.DECIMAL, rsmd.getColumnType(4)); + assertEquals("money", rsmd.getColumnTypeName(4)); + + // Column 5: SMALLMONEY + assertEquals(java.sql.Types.DECIMAL, rsmd.getColumnType(5)); + assertEquals("smallmoney", rsmd.getColumnTypeName(5)); + + // Column 6: UNIQUEIDENTIFIER + assertEquals(java.sql.Types.CHAR, rsmd.getColumnType(6)); + assertEquals("uniqueidentifier", rsmd.getColumnTypeName(6)); + + // Column 7: SQL_VARIANT + assertEquals(microsoft.sql.Types.SQL_VARIANT, rsmd.getColumnType(7)); + assertEquals("sql_variant", rsmd.getColumnTypeName(7)); + + // Column 8: GEOMETRY + assertEquals(microsoft.sql.Types.GEOMETRY, rsmd.getColumnType(8)); 
+ assertEquals("geometry", rsmd.getColumnTypeName(8)); + + // Column 9: GEOGRAPHY + assertEquals(microsoft.sql.Types.GEOGRAPHY, rsmd.getColumnType(9)); + assertEquals("geography", rsmd.getColumnTypeName(9)); + + // Column 10: VECTOR(3) + assertEquals(microsoft.sql.Types.VECTOR, rsmd.getColumnType(10)); + assertEquals("vector", rsmd.getColumnTypeName(10)); + + // Column 11: JSON + assertEquals(microsoft.sql.Types.JSON, rsmd.getColumnType(11)); + assertEquals("json", rsmd.getColumnTypeName(11)); + + } + } finally { + TestUtils.dropTableIfExists(AbstractSQLGenerator.escapeIdentifier(dataTypesTableName), stmt); + } + } + } }