Skip to content

Commit

Permalink
Use double quotes for all column names in SHOW CREATE TABLE
Browse files Browse the repository at this point in the history
Currently, SHOW CREATE TABLE does not use double quotes for column names that are reserved words. Because of this, copying the output of SHOW CREATE TABLE to re-create the table fails. SHOW CREATE TABLE now wraps all column names in double quotes if they are not already quoted.
  • Loading branch information
abhiseksaikia authored and mbasmanova committed May 17, 2021
1 parent 18f7e17 commit c63a7c7
Show file tree
Hide file tree
Showing 10 changed files with 152 additions and 107 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -129,15 +129,15 @@ public void testShowCreateTable()
{
assertThat(computeActual("SHOW CREATE TABLE orders").getOnlyValue())
.isEqualTo("CREATE TABLE elasticsearch.tpch.orders (\n" +
" clerk varchar,\n" +
" comment varchar,\n" +
" custkey bigint,\n" +
" orderdate timestamp,\n" +
" orderkey bigint,\n" +
" orderpriority varchar,\n" +
" orderstatus varchar,\n" +
" shippriority bigint,\n" +
" totalprice real\n" +
" \"clerk\" varchar,\n" +
" \"comment\" varchar,\n" +
" \"custkey\" bigint,\n" +
" \"orderdate\" timestamp,\n" +
" \"orderkey\" bigint,\n" +
" \"orderpriority\" varchar,\n" +
" \"orderstatus\" varchar,\n" +
" \"shippriority\" bigint,\n" +
" \"totalprice\" real\n" +
")");
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2597,45 +2597,60 @@ boolean isIncreasingSequence(List<Integer> fileNames)
@Test
public void testShowCreateTable()
{
String createTableSql = format("" +
"CREATE TABLE %s.%s.%s (\n" +
" c1 bigint,\n" +
" c2 double,\n" +
" \"c 3\" varchar,\n" +
" \"c'4\" array(bigint),\n" +
" c5 map(bigint, varchar)\n" +
")\n" +
"WITH (\n" +
" format = 'RCBINARY'\n" +
")",
String createTableFormat = "CREATE TABLE %s.%s.%s (\n" +
" %s bigint,\n" +
" %s double,\n" +
" \"c 3\" varchar,\n" +
" \"c'4\" array(bigint),\n" +
" %s map(bigint, varchar)\n" +
")\n" +
"WITH (\n" +
" format = 'RCBINARY'\n" +
")";
String createTableSql = format(
createTableFormat,
getSession().getCatalog().get(),
getSession().getSchema().get(),
"test_show_create_table");

"test_show_create_table",
"c1",
"c2",
"c5");
String expectedShowCreateTable = format(
createTableFormat,
getSession().getCatalog().get(),
getSession().getSchema().get(),
"test_show_create_table",
"\"c1\"",
"\"c2\"",
"\"c5\"");
assertUpdate(createTableSql);
MaterializedResult actualResult = computeActual("SHOW CREATE TABLE test_show_create_table");
assertEquals(getOnlyElement(actualResult.getOnlyColumnAsSet()), createTableSql);

createTableSql = format("" +
"CREATE TABLE %s.%s.%s (\n" +
" c1 bigint,\n" +
" \"c 2\" varchar,\n" +
" \"c'3\" array(bigint),\n" +
" c4 map(bigint, varchar) COMMENT 'comment test4',\n" +
" c5 double COMMENT 'comment test5'\n)\n" +
"COMMENT 'test'\n" +
"WITH (\n" +
" bucket_count = 5,\n" +
" bucketed_by = ARRAY['c1','c 2'],\n" +
" format = 'ORC',\n" +
" orc_bloom_filter_columns = ARRAY['c1','c2'],\n" +
" orc_bloom_filter_fpp = 7E-1,\n" +
" partitioned_by = ARRAY['c5'],\n" +
" sorted_by = ARRAY['c1','c 2 DESC']\n" +
")",
assertEquals(getOnlyElement(actualResult.getOnlyColumnAsSet()), expectedShowCreateTable);

createTableFormat = "CREATE TABLE %s.%s.%s (\n" +
" %s bigint,\n" +
" \"c 2\" varchar,\n" +
" \"c'3\" array(bigint),\n" +
" %s map(bigint, varchar) COMMENT 'comment test4',\n" +
" %s double COMMENT 'comment test5'\n)\n" +
"COMMENT 'test'\n" +
"WITH (\n" +
" bucket_count = 5,\n" +
" bucketed_by = ARRAY['c1','c 2'],\n" +
" format = 'ORC',\n" +
" orc_bloom_filter_columns = ARRAY['c1','c2'],\n" +
" orc_bloom_filter_fpp = 7E-1,\n" +
" partitioned_by = ARRAY['c5'],\n" +
" sorted_by = ARRAY['c1','c 2 DESC']\n" +
")";
createTableSql = format(
createTableFormat,
getSession().getCatalog().get(),
getSession().getSchema().get(),
"\"test_show_create_table'2\"");
"\"test_show_create_table'2\"",
"\"c1\"",
"\"c2\"",
"\"c5\"");
assertUpdate(createTableSql);
actualResult = computeActual("SHOW CREATE TABLE \"test_show_create_table'2\"");
assertEquals(getOnlyElement(actualResult.getOnlyColumnAsSet()), createTableSql);
Expand All @@ -2651,7 +2666,7 @@ public void testCreateExternalTable()

@Language("SQL") String createTableSql = format("" +
"CREATE TABLE %s.%s.test_create_external (\n" +
" name varchar\n" +
" \"name\" varchar\n" +
")\n" +
"WITH (\n" +
" external_location = '%s',\n" +
Expand Down Expand Up @@ -4747,8 +4762,8 @@ protected void testAlterAvroTableWithSchemaUrl(boolean renameColumn, boolean add
private String getAvroCreateTableSql(String tableName, String schemaFile)
{
return format("CREATE TABLE %s.%s.%s (\n" +
" dummy_col varchar,\n" +
" another_dummy_col varchar\n" +
" \"dummy_col\" varchar,\n" +
" \"another_dummy_col\" varchar\n" +
")\n" +
"WITH (\n" +
" avro_schema_url = '%s',\n" +
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,7 @@
import com.facebook.presto.spi.security.PrestoPrincipal;
import com.facebook.presto.spi.security.PrincipalType;
import com.facebook.presto.spi.session.PropertyMetadata;
import com.facebook.presto.sql.QueryUtil;
import com.facebook.presto.sql.analyzer.QueryExplainer;
import com.facebook.presto.sql.analyzer.SemanticException;
import com.facebook.presto.sql.parser.ParsingException;
Expand Down Expand Up @@ -508,12 +509,11 @@ protected Node visitShowCreate(ShowCreate node, Void context)
ConnectorTableMetadata connectorTableMetadata = metadata.getTableMetadata(session, tableHandle.get()).getMetadata();

Map<String, PropertyMetadata<?>> allColumnProperties = metadata.getColumnPropertyManager().getAllProperties().get(tableHandle.get().getConnectorId());

List<TableElement> columns = connectorTableMetadata.getColumns().stream()
.filter(column -> !column.isHidden())
.map(column -> {
List<Property> propertyNodes = buildProperties(objectName, Optional.of(column.getName()), INVALID_COLUMN_PROPERTY, column.getProperties(), allColumnProperties);
return new ColumnDefinition(new Identifier(column.getName()), column.getType().getDisplayName(), column.isNullable(), propertyNodes, Optional.ofNullable(column.getComment()));
return new ColumnDefinition(QueryUtil.quotedIdentifier(column.getName()), column.getType().getDisplayName(), column.isNullable(), propertyNodes, Optional.ofNullable(column.getComment()));
})
.collect(toImmutableList());

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -203,28 +203,46 @@ public void testCharTrailingSpace()
@Test
public void testInsertIntoNotNullColumn()
{
@Language("SQL") String createTableSql = format("" +
"CREATE TABLE %s.tpch.test_insert_not_null (\n" +
" column_a date,\n" +
" column_b date NOT NULL\n" +
")",
getSession().getCatalog().get());
String createTableFormat = "CREATE TABLE %s.tpch.test_insert_not_null (\n" +
" %s date,\n" +
" %s date NOT NULL\n" +
")";
@Language("SQL") String createTableSql = format(
createTableFormat,
getSession().getCatalog().get(),
"column_a",
"column_b");
@Language("SQL") String expectedCreateTableSql = format(
createTableFormat,
getSession().getCatalog().get(),
"\"column_a\"",
"\"column_b\"");
assertUpdate(createTableSql);
assertEquals(computeScalar("SHOW CREATE TABLE test_insert_not_null"), createTableSql);
assertEquals(computeScalar("SHOW CREATE TABLE test_insert_not_null"), expectedCreateTableSql);

assertQueryFails("INSERT INTO test_insert_not_null (column_a) VALUES (date '2012-12-31')", "NULL value not allowed for NOT NULL column: column_b");
assertQueryFails("INSERT INTO test_insert_not_null (column_a, column_b) VALUES (date '2012-12-31', null)", "NULL value not allowed for NOT NULL column: column_b");

assertUpdate("ALTER TABLE test_insert_not_null ADD COLUMN column_c BIGINT NOT NULL");

createTableSql = format("" +
"CREATE TABLE %s.tpch.test_insert_not_null (\n" +
" column_a date,\n" +
" column_b date NOT NULL,\n" +
" column_c bigint NOT NULL\n" +
")",
getSession().getCatalog().get());
assertEquals(computeScalar("SHOW CREATE TABLE test_insert_not_null"), createTableSql);
createTableFormat = "CREATE TABLE %s.tpch.test_insert_not_null (\n" +
" %s date,\n" +
" %s date NOT NULL,\n" +
" %s bigint NOT NULL\n" +
")";
createTableSql = format(
createTableFormat,
getSession().getCatalog().get(),
"column_a",
"column_b",
"column_c");
expectedCreateTableSql = format(
createTableFormat,
getSession().getCatalog().get(),
"\"column_a\"",
"\"column_b\"",
"\"column_c\"");
assertEquals(computeScalar("SHOW CREATE TABLE test_insert_not_null"), expectedCreateTableSql);

assertQueryFails("INSERT INTO test_insert_not_null (column_b) VALUES (date '2012-12-31')", "NULL value not allowed for NOT NULL column: column_c");
assertQueryFails("INSERT INTO test_insert_not_null (column_b, column_c) VALUES (date '2012-12-31', null)", "NULL value not allowed for NOT NULL column: column_c");
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -80,15 +80,15 @@ public void testShowCreateTable()
assertThat((String) computeActual("SHOW CREATE TABLE orders").getOnlyValue())
// If the connector reports additional column properties, the expected value needs to be adjusted in the test subclass
.matches("CREATE TABLE \\w+\\.\\w+\\.orders \\Q(\n" +
" orderkey decimal(19, 0),\n" +
" custkey decimal(19, 0),\n" +
" orderstatus varchar(1),\n" +
" totalprice double,\n" +
" orderdate timestamp(3),\n" +
" orderpriority varchar(15),\n" +
" clerk varchar(15),\n" +
" shippriority decimal(10, 0),\n" +
" comment varchar(79)\n" +
" \"orderkey\" decimal(19, 0),\n" +
" \"custkey\" decimal(19, 0),\n" +
" \"orderstatus\" varchar(1),\n" +
" \"totalprice\" double,\n" +
" \"orderdate\" timestamp(3),\n" +
" \"orderpriority\" varchar(15),\n" +
" \"clerk\" varchar(15),\n" +
" \"shippriority\" decimal(10, 0),\n" +
" \"comment\" varchar(79)\n" +
")");
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -209,28 +209,40 @@ public void testCharTrailingSpace()
@Test
public void testInsertIntoNotNullColumn()
{
@Language("SQL") String createTableSql = format("" +
"CREATE TABLE %s.tpch.test_insert_not_null (\n" +
" column_a date,\n" +
" column_b date NOT NULL\n" +
")",
getSession().getCatalog().get());
String createTableFormat = "CREATE TABLE %s.tpch.test_insert_not_null (\n" +
" %s date,\n" +
" %s date NOT NULL\n" +
")";
@Language("SQL") String createTableSql = format(
createTableFormat,
getSession().getCatalog().get(),
"column_a",
"column_b");
@Language("SQL") String expectedShowCreateTableSql = format(
createTableFormat,
getSession().getCatalog().get(),
"\"column_a\"",
"\"column_b\"");
assertUpdate(createTableSql);
assertEquals(computeScalar("SHOW CREATE TABLE test_insert_not_null"), createTableSql);
assertEquals(computeScalar("SHOW CREATE TABLE test_insert_not_null"), expectedShowCreateTableSql);

assertQueryFails("INSERT INTO test_insert_not_null (column_a) VALUES (date '2012-12-31')", "NULL value not allowed for NOT NULL column: column_b");
assertQueryFails("INSERT INTO test_insert_not_null (column_a, column_b) VALUES (date '2012-12-31', null)", "NULL value not allowed for NOT NULL column: column_b");

assertUpdate("ALTER TABLE test_insert_not_null ADD COLUMN column_c BIGINT NOT NULL");

createTableSql = format("" +
"CREATE TABLE %s.tpch.test_insert_not_null (\n" +
" column_a date,\n" +
" column_b date NOT NULL,\n" +
" column_c bigint NOT NULL\n" +
")",
getSession().getCatalog().get());
assertEquals(computeScalar("SHOW CREATE TABLE test_insert_not_null"), createTableSql);
createTableFormat = "CREATE TABLE %s.tpch.test_insert_not_null (\n" +
" %s date,\n" +
" %s date NOT NULL,\n" +
" %s bigint NOT NULL\n" +
")";
expectedShowCreateTableSql = format(
createTableFormat,
getSession().getCatalog().get(),
"\"column_a\"",
"\"column_b\"",
"\"column_c\"");
assertEquals(computeScalar("SHOW CREATE TABLE test_insert_not_null"), expectedShowCreateTableSql);

assertQueryFails("INSERT INTO test_insert_not_null (column_b) VALUES (date '2012-12-31')", "NULL value not allowed for NOT NULL column: column_c");
assertQueryFails("INSERT INTO test_insert_not_null (column_b, column_c) VALUES (date '2012-12-31', null)", "NULL value not allowed for NOT NULL column: column_c");
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -11,10 +11,10 @@ c_varchar|varchar|||
--! name: show create csv table
SHOW CREATE TABLE csv_table
--!
CREATE TABLE hive.default.csv_table (\n c_bigint varchar,\n c_varchar varchar\n)\nWITH (\n external_location = 'hdfs://hadoop-master:9000/product-test/datasets/csv_table',\n format = 'CSV'\n)
CREATE TABLE hive.default.csv_table (\n "c_bigint" varchar,\n "c_varchar" varchar\n)\nWITH (\n external_location = 'hdfs://hadoop-master:9000/product-test/datasets/csv_table',\n format = 'CSV'\n)
--! name: create table like
DROP TABLE IF EXISTS like_csv_table;
CREATE TABLE like_csv_table (LIKE csv_table INCLUDING PROPERTIES);
SHOW CREATE TABLE like_csv_table
--!
CREATE TABLE hive.default.like_csv_table (\n c_bigint varchar,\n c_varchar varchar\n)\nWITH (\n external_location = 'hdfs://hadoop-master:9000/product-test/datasets/csv_table',\n format = 'CSV'\n)
CREATE TABLE hive.default.like_csv_table (\n "c_bigint" varchar,\n "c_varchar" varchar\n)\nWITH (\n external_location = 'hdfs://hadoop-master:9000/product-test/datasets/csv_table',\n format = 'CSV'\n)
Original file line number Diff line number Diff line change
Expand Up @@ -11,10 +11,10 @@ c_varchar|varchar|||
--! name: show create csv table
SHOW CREATE TABLE csv_table_with_custom_parameters
--!
CREATE TABLE hive.default.csv_table_with_custom_parameters (\n c_bigint varchar,\n c_varchar varchar\n)\nWITH (\n csv_escape = 'E',\n csv_quote = 'Q',\n csv_separator = 'S',\n external_location = 'hdfs://hadoop-master:9000/product-test/datasets/csv_table_with_custom_parameters',\n format = 'CSV'\n)
CREATE TABLE hive.default.csv_table_with_custom_parameters (\n "c_bigint" varchar,\n "c_varchar" varchar\n)\nWITH (\n csv_escape = 'E',\n csv_quote = 'Q',\n csv_separator = 'S',\n external_location = 'hdfs://hadoop-master:9000/product-test/datasets/csv_table_with_custom_parameters',\n format = 'CSV'\n)
--! name: create table like
DROP TABLE IF EXISTS like_csv_table;
CREATE TABLE like_csv_table (LIKE csv_table_with_custom_parameters INCLUDING PROPERTIES);
SHOW CREATE TABLE like_csv_table
--!
CREATE TABLE hive.default.like_csv_table (\n c_bigint varchar,\n c_varchar varchar\n)\nWITH (\n csv_escape = 'E',\n csv_quote = 'Q',\n csv_separator = 'S',\n external_location = 'hdfs://hadoop-master:9000/product-test/datasets/csv_table_with_custom_parameters',\n format = 'CSV'\n)
CREATE TABLE hive.default.like_csv_table (\n "c_bigint" varchar,\n "c_varchar" varchar\n)\nWITH (\n csv_escape = 'E',\n csv_quote = 'Q',\n csv_separator = 'S',\n external_location = 'hdfs://hadoop-master:9000/product-test/datasets/csv_table_with_custom_parameters',\n format = 'CSV'\n)
Loading

0 comments on commit c63a7c7

Please sign in to comment.