Commit

Merge pull request #11031 from Mause/feature/jdbc-comments
feat(jdbc): expose comments via jdbc methods
Mytherin authored Mar 8, 2024
2 parents 1816efe + d77a053 commit 146def5
Showing 4 changed files with 86 additions and 34 deletions.
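
At a glance: comments attached with COMMENT ON now flow from the catalog, through the information_schema views, into the REMARKS columns returned by the JDBC DatabaseMetaData methods. Below is a minimal usage sketch of the resulting behaviour, not part of the commit itself; the class name and connection URL are assumptions, it presumes the DuckDB JDBC driver is on the classpath, and it assumes null patterns are treated as wildcards by the metadata methods (mirroring the new tests further down).

```java
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class CommentRemarksSketch {
    public static void main(String[] args) throws Exception {
        try (Connection conn = DriverManager.getConnection("jdbc:duckdb:");
             Statement stmt = conn.createStatement()) {
            stmt.execute("CREATE TABLE a (i INTEGER)");
            stmt.execute("COMMENT ON TABLE a IS 'a table'");
            stmt.execute("COMMENT ON COLUMN a.i IS 'a column'");

            DatabaseMetaData md = conn.getMetaData();

            // Table comments now surface in the REMARKS column of getTables()
            try (ResultSet rs = md.getTables(null, null, "a", null)) {
                while (rs.next()) {
                    System.out.println(rs.getString("TABLE_NAME") + " -> " + rs.getString("REMARKS"));
                }
            }

            // Column comments now surface in the REMARKS column of getColumns()
            try (ResultSet rs = md.getColumns(null, null, "a", "i")) {
                while (rs.next()) {
                    System.out.println(rs.getString("COLUMN_NAME") + " -> " + rs.getString("REMARKS"));
                }
            }
        }
    }
}
```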
4 changes: 2 additions & 2 deletions src/catalog/default/default_views.cpp
@@ -46,9 +46,9 @@ static DefaultView internal_views[] = {
{"pg_catalog", "pg_tablespace", "SELECT 0 oid, 'pg_default' spcname, 0 spcowner, NULL spcacl, NULL spcoptions"},
{"pg_catalog", "pg_type", "SELECT type_oid oid, format_pg_type(type_name) typname, schema_oid typnamespace, 0 typowner, type_size typlen, false typbyval, CASE WHEN logical_type='ENUM' THEN 'e' else 'b' end typtype, CASE WHEN type_category='NUMERIC' THEN 'N' WHEN type_category='STRING' THEN 'S' WHEN type_category='DATETIME' THEN 'D' WHEN type_category='BOOLEAN' THEN 'B' WHEN type_category='COMPOSITE' THEN 'C' WHEN type_category='USER' THEN 'U' ELSE 'X' END typcategory, false typispreferred, true typisdefined, NULL typdelim, NULL typrelid, NULL typsubscript, NULL typelem, NULL typarray, NULL typinput, NULL typoutput, NULL typreceive, NULL typsend, NULL typmodin, NULL typmodout, NULL typanalyze, 'd' typalign, 'p' typstorage, NULL typnotnull, NULL typbasetype, NULL typtypmod, NULL typndims, NULL typcollation, NULL typdefaultbin, NULL typdefault, NULL typacl FROM duckdb_types() WHERE type_size IS NOT NULL;"},
{"pg_catalog", "pg_views", "SELECT schema_name schemaname, view_name viewname, 'duckdb' viewowner, sql definition FROM duckdb_views()"},
{"information_schema", "columns", "SELECT database_name table_catalog, schema_name table_schema, table_name, column_name, column_index ordinal_position, column_default, CASE WHEN is_nullable THEN 'YES' ELSE 'NO' END is_nullable, data_type, character_maximum_length, NULL character_octet_length, numeric_precision, numeric_precision_radix, numeric_scale, NULL datetime_precision, NULL interval_type, NULL interval_precision, NULL character_set_catalog, NULL character_set_schema, NULL character_set_name, NULL collation_catalog, NULL collation_schema, NULL collation_name, NULL domain_catalog, NULL domain_schema, NULL domain_name, NULL udt_catalog, NULL udt_schema, NULL udt_name, NULL scope_catalog, NULL scope_schema, NULL scope_name, NULL maximum_cardinality, NULL dtd_identifier, NULL is_self_referencing, NULL is_identity, NULL identity_generation, NULL identity_start, NULL identity_increment, NULL identity_maximum, NULL identity_minimum, NULL identity_cycle, NULL is_generated, NULL generation_expression, NULL is_updatable FROM duckdb_columns;"},
{"information_schema", "columns", "SELECT database_name table_catalog, schema_name table_schema, table_name, column_name, column_index ordinal_position, column_default, CASE WHEN is_nullable THEN 'YES' ELSE 'NO' END is_nullable, data_type, character_maximum_length, NULL character_octet_length, numeric_precision, numeric_precision_radix, numeric_scale, NULL datetime_precision, NULL interval_type, NULL interval_precision, NULL character_set_catalog, NULL character_set_schema, NULL character_set_name, NULL collation_catalog, NULL collation_schema, NULL collation_name, NULL domain_catalog, NULL domain_schema, NULL domain_name, NULL udt_catalog, NULL udt_schema, NULL udt_name, NULL scope_catalog, NULL scope_schema, NULL scope_name, NULL maximum_cardinality, NULL dtd_identifier, NULL is_self_referencing, NULL is_identity, NULL identity_generation, NULL identity_start, NULL identity_increment, NULL identity_maximum, NULL identity_minimum, NULL identity_cycle, NULL is_generated, NULL generation_expression, NULL is_updatable, comment AS COLUMN_COMMENT FROM duckdb_columns;"},
{"information_schema", "schemata", "SELECT database_name catalog_name, schema_name, 'duckdb' schema_owner, NULL default_character_set_catalog, NULL default_character_set_schema, NULL default_character_set_name, sql sql_path FROM duckdb_schemas()"},
{"information_schema", "tables", "SELECT database_name table_catalog, schema_name table_schema, table_name, CASE WHEN temporary THEN 'LOCAL TEMPORARY' ELSE 'BASE TABLE' END table_type, NULL self_referencing_column_name, NULL reference_generation, NULL user_defined_type_catalog, NULL user_defined_type_schema, NULL user_defined_type_name, 'YES' is_insertable_into, 'NO' is_typed, CASE WHEN temporary THEN 'PRESERVE' ELSE NULL END commit_action FROM duckdb_tables() UNION ALL SELECT database_name table_catalog, schema_name table_schema, view_name table_name, 'VIEW' table_type, NULL self_referencing_column_name, NULL reference_generation, NULL user_defined_type_catalog, NULL user_defined_type_schema, NULL user_defined_type_name, 'NO' is_insertable_into, 'NO' is_typed, NULL commit_action FROM duckdb_views;"},
{"information_schema", "tables", "SELECT database_name table_catalog, schema_name table_schema, table_name, CASE WHEN temporary THEN 'LOCAL TEMPORARY' ELSE 'BASE TABLE' END table_type, NULL self_referencing_column_name, NULL reference_generation, NULL user_defined_type_catalog, NULL user_defined_type_schema, NULL user_defined_type_name, 'YES' is_insertable_into, 'NO' is_typed, CASE WHEN temporary THEN 'PRESERVE' ELSE NULL END commit_action, comment AS TABLE_COMMENT FROM duckdb_tables() UNION ALL SELECT database_name table_catalog, schema_name table_schema, view_name table_name, 'VIEW' table_type, NULL self_referencing_column_name, NULL reference_generation, NULL user_defined_type_catalog, NULL user_defined_type_schema, NULL user_defined_type_name, 'NO' is_insertable_into, 'NO' is_typed, NULL commit_action, comment AS TABLE_COMMENT FROM duckdb_views;"},
{"information_schema", "character_sets", "SELECT NULL character_set_catalog, NULL character_set_schema, 'UTF8' character_set_name, 'UCS' character_repertoire, 'UTF8' form_of_use, current_database() default_collate_catalog, 'pg_catalog' default_collate_schema, 'ucs_basic' default_collate_name;"},
{"information_schema", "referential_constraints", "SELECT f.database_name constraint_catalog, f.schema_name constraint_schema, concat(f.source, '_', f.target, '_', f.target_column, '_fkey') constraint_name, current_database() unique_constraint_catalog, c.schema_name unique_constraint_schema, concat(c.table_name, '_', f.target_column, '_', CASE WHEN c.constraint_type == 'UNIQUE' THEN 'key' ELSE 'pkey' END) unique_constraint_name, 'NONE' match_option, 'NO ACTION' update_rule, 'NO ACTION' delete_rule FROM duckdb_constraints() c JOIN (SELECT *, name_extract['source'] as source, name_extract['target'] as target, name_extract['target_column'] as target_column FROM (SELECT *, regexp_extract(constraint_text, 'FOREIGN KEY \\(([a-zA-Z_0-9]+)\\) REFERENCES ([a-zA-Z_0-9]+)\\(([a-zA-Z_0-9]+)\\)', ['source', 'target', 'target_column']) name_extract FROM duckdb_constraints() WHERE constraint_type = 'FOREIGN KEY')) f ON name_extract['target'] = c.table_name AND (c.constraint_type = 'UNIQUE' OR c.constraint_type = 'PRIMARY KEY')"},
{"information_schema", "key_column_usage", "SELECT current_database() constraint_catalog, schema_name constraint_schema, concat(table_name, '_', constraint_column_names[1], CASE constraint_type WHEN 'FOREIGN KEY' THEN '_fkey' WHEN 'PRIMARY KEY' THEN '_pkey' ELSE '_key' END) constraint_name, current_database() table_catalog, schema_name table_schema, table_name, constraint_column_names[1] column_name, 1 ordinal_position, CASE constraint_type WHEN 'FOREIGN KEY' THEN 1 ELSE NULL END position_in_unique_constraint FROM duckdb_constraints() WHERE constraint_type = 'FOREIGN KEY' OR constraint_type = 'PRIMARY KEY' OR constraint_type = 'UNIQUE';"},
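
The view changes above alias the new comment field as TABLE_COMMENT and COLUMN_COMMENT, which is what the JDBC layer selects further down. A hedged sketch of reading those columns directly, continuing the sketch above (column aliases are taken from the view definitions; access is by index to sidestep identifier-case questions):

```java
// Continues the sketch above: 'conn' is an open DuckDB connection where
// COMMENT ON TABLE a IS 'a table' has already been executed.
try (Statement stmt = conn.createStatement();
     ResultSet rs = stmt.executeQuery(
         "SELECT table_name, TABLE_COMMENT FROM information_schema.tables WHERE table_name = 'a'")) {
    while (rs.next()) {
        // Expected output: a -> a table
        System.out.println(rs.getString(1) + " -> " + rs.getString(2));
    }
}
```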
86 changes: 63 additions & 23 deletions tools/jdbc/src/main/java/org/duckdb/DuckDBDatabaseMetaData.java
@@ -11,6 +11,10 @@
import java.sql.SQLException;
import java.sql.SQLFeatureNotSupportedException;
import java.sql.Statement;
import java.sql.Types;
import java.util.Arrays;
import java.util.Map;
import java.util.stream.Collectors;

import static java.lang.System.lineSeparator;

@@ -719,7 +723,7 @@ public ResultSet getTables(String catalog, String schemaPattern, String tableNam
str.append(", table_schema AS 'TABLE_SCHEM'").append(lineSeparator());
str.append(", table_name AS 'TABLE_NAME'").append(lineSeparator());
str.append(", table_type AS 'TABLE_TYPE'").append(lineSeparator());
str.append(", NULL::VARCHAR AS 'REMARKS'").append(lineSeparator());
str.append(", TABLE_COMMENT AS 'REMARKS'").append(lineSeparator());
str.append(", NULL::VARCHAR AS 'TYPE_CAT'").append(lineSeparator());
str.append(", NULL::VARCHAR AS 'TYPE_SCHEM'").append(lineSeparator());
str.append(", NULL::VARCHAR AS 'TYPE_NAME'").append(lineSeparator());
@@ -816,28 +820,36 @@ public ResultSet getColumns(String catalogPattern, String schemaPattern, String
columnNamePattern = "%";
}

// need to figure out the java types for the sql types :/
StringBuilder values_str = new StringBuilder(256);
values_str.append("VALUES(NULL::STRING, NULL::INTEGER)");
try (Statement gunky_statement = conn.createStatement();
// TODO this could get slow with many many columns and we really only need the
// types :/
ResultSet rs = gunky_statement.executeQuery(
"SELECT DISTINCT data_type FROM information_schema.columns ORDER BY data_type")) {
while (rs.next()) {
values_str.append(", ('")
.append(rs.getString(1))
.append("', ")
.append(
DuckDBResultSetMetaData.type_to_int(DuckDBResultSetMetaData.TypeNameToType(rs.getString(1))))
.append(")");
}
}

PreparedStatement ps = conn.prepareStatement(
"SELECT table_catalog AS 'TABLE_CAT', table_schema AS 'TABLE_SCHEM', table_name AS 'TABLE_NAME', column_name as 'COLUMN_NAME', type_id AS 'DATA_TYPE', c.data_type AS 'TYPE_NAME', NULL AS 'COLUMN_SIZE', NULL AS 'BUFFER_LENGTH', numeric_precision AS 'DECIMAL_DIGITS', 10 AS 'NUM_PREC_RADIX', CASE WHEN is_nullable = 'YES' THEN 1 else 0 END AS 'NULLABLE', NULL as 'REMARKS', column_default AS 'COLUMN_DEF', NULL AS 'SQL_DATA_TYPE', NULL AS 'SQL_DATETIME_SUB', character_octet_length AS 'CHAR_OCTET_LENGTH', ordinal_position AS 'ORDINAL_POSITION', is_nullable AS 'IS_NULLABLE', NULL AS 'SCOPE_CATALOG', NULL AS 'SCOPE_SCHEMA', NULL AS 'SCOPE_TABLE', NULL AS 'SOURCE_DATA_TYPE', '' AS 'IS_AUTOINCREMENT', '' AS 'IS_GENERATEDCOLUMN' FROM information_schema.columns c JOIN (" +
values_str +
") t(type_name, type_id) ON c.data_type = t.type_name WHERE table_catalog LIKE ? AND table_schema LIKE ? AND table_name LIKE ? AND column_name LIKE ? ORDER BY \"TABLE_CAT\",\"TABLE_SCHEM\", \"TABLE_NAME\", \"ORDINAL_POSITION\"");
PreparedStatement ps =
conn.prepareStatement("SELECT "
+ "table_catalog AS 'TABLE_CAT', "
+ "table_schema AS 'TABLE_SCHEM', "
+ "table_name AS 'TABLE_NAME', "
+ "column_name as 'COLUMN_NAME', " + makeDataMap("c.data_type", "DATA_TYPE") + ", "
+ "c.data_type AS 'TYPE_NAME', "
+ "NULL AS 'COLUMN_SIZE', NULL AS 'BUFFER_LENGTH', "
+ "numeric_precision AS 'DECIMAL_DIGITS', "
+ "10 AS 'NUM_PREC_RADIX', "
+ "CASE WHEN is_nullable = 'YES' THEN 1 else 0 END AS 'NULLABLE', "
+ "COLUMN_COMMENT as 'REMARKS', "
+ "column_default AS 'COLUMN_DEF', "
+ "NULL AS 'SQL_DATA_TYPE', "
+ "NULL AS 'SQL_DATETIME_SUB', "
+ "NULL AS 'CHAR_OCTET_LENGTH', "
+ "ordinal_position AS 'ORDINAL_POSITION', "
+ "is_nullable AS 'IS_NULLABLE', "
+ "NULL AS 'SCOPE_CATALOG', "
+ "NULL AS 'SCOPE_SCHEMA', "
+ "NULL AS 'SCOPE_TABLE', "
+ "NULL AS 'SOURCE_DATA_TYPE', "
+ "'' AS 'IS_AUTOINCREMENT', "
+ "'' AS 'IS_GENERATEDCOLUMN' "
+ "FROM information_schema.columns c "
+ "WHERE table_catalog LIKE ? AND "
+ "table_schema LIKE ? AND "
+ "table_name LIKE ? AND "
+ "column_name LIKE ? "
+ "ORDER BY \"TABLE_CAT\",\"TABLE_SCHEM\", \"TABLE_NAME\", \"ORDINAL_POSITION\"");
ps.setString(1, catalogPattern);
ps.setString(2, schemaPattern);
ps.setString(3, tableNamePattern);
@@ -1210,4 +1222,32 @@ public ResultSet getPseudoColumns(String catalog, String schemaPattern, String t
public boolean generatedKeyAlwaysReturned() throws SQLException {
throw new SQLFeatureNotSupportedException("generatedKeyAlwaysReturned");
}

static String dataMap;
static {
dataMap = makeCase(
Arrays.stream(DuckDBColumnType.values())
.collect(Collectors.toMap(ty -> ty.name().replace("_", " "), DuckDBResultSetMetaData::type_to_int)));
}

private static <T> String makeCase(Map<String, T> values) {
return values.entrySet()
.stream()
.map(ty -> {
T value = ty.getValue();
return String.format("WHEN '%s' THEN %s ", ty.getKey(),
value instanceof String ? String.format("'%s'", value) : value);
})
.collect(Collectors.joining());
}

/**
* Builds a SQL CASE expression mapping the DuckDB type name held in {@code srcColumnName}
* to the corresponding {@link java.sql.Types} code, aliased as {@code destColumnName}.
*
* @param srcColumnName column or expression holding the DuckDB type name, e.g. "c.data_type"
* @param destColumnName alias for the resulting JDBC type code column
* @return the CASE expression, defaulting to {@link Types#JAVA_OBJECT} for unmapped types
* @see DuckDBResultSetMetaData#type_to_int(DuckDBColumnType)
*/
private static String makeDataMap(String srcColumnName, String destColumnName) {
return String.format("CASE %s %s ELSE %d END as %s", srcColumnName, dataMap, Types.JAVA_OBJECT, destColumnName);
}
}
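
The static dataMap above is built once from the DuckDBColumnType enum (underscores in enum names become spaces) and spliced into a SQL CASE by makeDataMap, replacing the old per-call query that joined information_schema.columns against a VALUES list of distinct types. For illustration, using the standard java.sql.Types codes (BOOLEAN = 16, INTEGER = 4, VARCHAR = 12, TIME_WITH_TIMEZONE = 2013, JAVA_OBJECT = 2000), the generated expression has roughly this shape; the arms shown and their order are abbreviated, not the exact output:

```java
// Rough, abbreviated shape of makeDataMap("c.data_type", "DATA_TYPE"); the real
// string contains one WHEN arm per DuckDBColumnType value, in map iteration order.
String dataTypeExpr = "CASE c.data_type"
                      + " WHEN 'BOOLEAN' THEN 16"               // Types.BOOLEAN
                      + " WHEN 'INTEGER' THEN 4"                // Types.INTEGER
                      + " WHEN 'VARCHAR' THEN 12"               // Types.VARCHAR
                      + " WHEN 'TIME WITH TIME ZONE' THEN 2013" // Types.TIME_WITH_TIMEZONE
                      + " /* ... */"
                      + " ELSE 2000 END as DATA_TYPE";          // Types.JAVA_OBJECT fallback
```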
8 changes: 7 additions & 1 deletion tools/jdbc/src/main/java/org/duckdb/DuckDBResultSetMetaData.java
@@ -95,7 +95,7 @@ public String getColumnName(int column) throws SQLException {
return column_names[column - 1];
}

public static int type_to_int(DuckDBColumnType type) throws SQLException {
public static int type_to_int(DuckDBColumnType type) {
switch (type) {
case BOOLEAN:
return Types.BOOLEAN;
@@ -129,6 +129,12 @@ public static int type_to_int(DuckDBColumnType type) throws SQLException {
return Types.TIMESTAMP;
case TIMESTAMP_WITH_TIME_ZONE:
return Types.TIMESTAMP_WITH_TIMEZONE;
case TIME_WITH_TIME_ZONE:
return Types.TIME_WITH_TIMEZONE;
case STRUCT:
return Types.STRUCT;
case BIT:
return Types.BIT;
case BLOB:
return Types.BLOB;
default:
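
With the added type_to_int cases, STRUCT, BIT and TIME WITH TIME ZONE columns now report their dedicated JDBC type codes rather than the previous Types.JAVA_OBJECT result, which is what the test updates below assert. A small sketch of the observable difference; the query text is an assumption, not the exact test query:

```java
// Assumes java.sql.* imports and an open in-memory DuckDB connection 'conn'.
try (Statement stmt = conn.createStatement();
     ResultSet rs = stmt.executeQuery("SELECT {'i': 42, 'j': 'x'} AS s")) {
    ResultSetMetaData meta = rs.getMetaData();
    System.out.println(meta.getColumnType(1) == Types.STRUCT); // true (was Types.JAVA_OBJECT)
    System.out.println(meta.getColumnTypeName(1));             // e.g. STRUCT(i INTEGER, j VARCHAR)
}
```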
22 changes: 14 additions & 8 deletions tools/jdbc/src/test/java/org/duckdb/TestDuckDBJDBC.java
@@ -557,7 +557,7 @@ public static void test_struct_metadata() throws Exception {
assertEquals(meta.getColumnCount(), 1);
assertEquals(meta.getColumnName(1), "struct");
assertEquals(meta.getColumnTypeName(1), "STRUCT(i INTEGER, j VARCHAR)");
assertEquals(meta.getColumnType(1), Types.JAVA_OBJECT);
assertEquals(meta.getColumnType(1), Types.STRUCT);
}
}

@@ -1726,6 +1726,10 @@ public static void test_schema_reflection() throws Exception {
Statement stmt = conn.createStatement();
stmt.execute("CREATE TABLE a (i INTEGER)");
stmt.execute("CREATE VIEW b AS SELECT i::STRING AS j FROM a");
stmt.execute("COMMENT ON TABLE a IS 'a table'");
stmt.execute("COMMENT ON COLUMN a.i IS 'a column'");
stmt.execute("COMMENT ON VIEW b IS 'a view'");
stmt.execute("COMMENT ON COLUMN b.j IS 'a column'");

DatabaseMetaData md = conn.getMetaData();
ResultSet rs;
@@ -1756,8 +1760,8 @@ public static void test_schema_reflection() throws Exception {
assertEquals(rs.getString(3), "a");
assertEquals(rs.getString("TABLE_TYPE"), "BASE TABLE");
assertEquals(rs.getString(4), "BASE TABLE");
assertNull(rs.getObject("REMARKS"));
assertNull(rs.getObject(5));
assertEquals(rs.getObject("REMARKS"), "a table");
assertEquals(rs.getObject(5), "a table");
assertNull(rs.getObject("TYPE_CAT"));
assertNull(rs.getObject(6));
assertNull(rs.getObject("TYPE_SCHEM"));
@@ -1777,8 +1781,8 @@
assertEquals(rs.getString(3), "b");
assertEquals(rs.getString("TABLE_TYPE"), "VIEW");
assertEquals(rs.getString(4), "VIEW");
assertNull(rs.getObject("REMARKS"));
assertNull(rs.getObject(5));
assertEquals(rs.getObject("REMARKS"), "a view");
assertEquals(rs.getObject(5), "a view");
assertNull(rs.getObject("TYPE_CAT"));
assertNull(rs.getObject(6));
assertNull(rs.getObject("TYPE_SCHEM"));
@@ -1803,8 +1807,8 @@
assertEquals(rs.getString(3), "a");
assertEquals(rs.getString("TABLE_TYPE"), "BASE TABLE");
assertEquals(rs.getString(4), "BASE TABLE");
assertNull(rs.getObject("REMARKS"));
assertNull(rs.getObject(5));
assertEquals(rs.getObject("REMARKS"), "a table");
assertEquals(rs.getObject(5), "a table");
assertNull(rs.getObject("TYPE_CAT"));
assertNull(rs.getObject(6));
assertNull(rs.getObject("TYPE_SCHEM"));
@@ -1830,6 +1834,7 @@
assertEquals(rs.getString("TABLE_NAME"), "a");
assertEquals(rs.getString(3), "a");
assertEquals(rs.getString("COLUMN_NAME"), "i");
assertEquals(rs.getString("REMARKS"), "a column");
assertEquals(rs.getString(4), "i");
assertEquals(rs.getInt("DATA_TYPE"), Types.INTEGER);
assertEquals(rs.getInt(5), Types.INTEGER);
@@ -1862,6 +1867,7 @@
assertNull(rs.getObject(7));
assertNull(rs.getObject("BUFFER_LENGTH"));
assertNull(rs.getObject(8));
assertEquals(rs.getString("REMARKS"), "a column");

rs.close();

@@ -1908,7 +1914,7 @@ public static void test_time_tz() throws Exception {
rs.next();

assertEquals(rs.getString("TYPE_NAME"), "TIME WITH TIME ZONE");
assertEquals(rs.getInt("DATA_TYPE"), Types.JAVA_OBJECT);
assertEquals(rs.getInt("DATA_TYPE"), Types.TIME_WITH_TIMEZONE);
}

s.execute(
