Skip to content

Commit c9a7beb

Browse files
authored
SEA metadata: throw for invalid null/empty params (Thrift parity) (#1390)
## Summary - Change key-based SEA metadata operations (`getPrimaryKeys`, `getImportedKeys`, `getExportedKeys`, `getCrossReference`) to **throw `SQLException`** instead of returning empty `ResultSet`s for invalid parameter combinations - Matches Thrift server behavior: null/empty table → throw, null schema with explicit catalog → throw, all-empty strings in cross-reference → throw - Builds on #1370 which added null catalog/schema resolution for valid cases ## What changed - **`DatabricksMetadataQueryClient.resolveKeyBasedParams()`**: Now throws `DatabricksSQLException` for invalid combos instead of returning `null` - **`DatabricksMetadataQueryClient.listExportedKeys()`**: Added null/empty table validation - **`DatabricksDatabaseMetaData.getCrossReference()`**: Added empty string validation for both parent and foreign sides - **`DatabricksMetadataQueryClientTest`**: Updated tests to expect exceptions instead of empty results - **`MetadataNullResolutionTests`**: Updated integration tests to verify exception-throwing behavior ## Thrift behavior reference | Scenario | Thrift behavior | SEA behavior (after this PR) | |---|---|---| | `getPrimaryKeys(null, null, table)` | Resolves to current catalog/schema | Same (from #1370) | | `getPrimaryKeys(catalog, null, table)` | **Throws** | **Throws** | | `getPrimaryKeys(catalog, schema, null)` | **Throws** | **Throws** | | `getCrossReference(all empty strings)` | **Throws** | **Throws** | ## Test plan - [x] Unit tests: `DatabricksMetadataQueryClientTest` — 57 tests pass - [ ] Integration tests: `MetadataNullResolutionTests` (requires WireMock stub re-recording) - [ ] CI pipeline NO_CHANGELOG=true This pull request was AI-assisted by Isaac. --------- Signed-off-by: Madhavendra Rathore <madhavendra.rathore@databricks.com>
1 parent cbf02a8 commit c9a7beb

File tree

16 files changed

+197
-524
lines changed

16 files changed

+197
-524
lines changed

src/main/java/com/databricks/jdbc/api/impl/DatabricksDatabaseMetaData.java

Lines changed: 11 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1132,9 +1132,18 @@ public ResultSet getCrossReference(
11321132
foreignTable));
11331133

11341134
throwExceptionIfConnectionIsClosed();
1135-
if (parentTable == null && foreignTable == null) {
1135+
// Thrift requires parentTable — null or empty parentTable is invalid
1136+
if (parentTable == null || parentTable.isEmpty()) {
1137+
LOGGER.debug("getCrossReference: parentTable is null or empty, throwing");
11361138
throw new DatabricksSQLException(
1137-
"Invalid argument: foreignTable and parentTableName are both null",
1139+
"Invalid argument: parentTable may not be null or empty",
1140+
DatabricksDriverErrorCode.INVALID_STATE);
1141+
}
1142+
// Empty foreign table is also invalid — Thrift server rejects it
1143+
if (foreignTable != null && foreignTable.isEmpty()) {
1144+
LOGGER.debug("getCrossReference: foreignTable is empty string, throwing");
1145+
throw new DatabricksSQLException(
1146+
"Invalid argument: foreignTable may not be empty",
11381147
DatabricksDriverErrorCode.INVALID_STATE);
11391148
}
11401149

src/main/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksMetadataQueryClient.java

Lines changed: 41 additions & 53 deletions
Original file line numberDiff line numberDiff line change
@@ -14,8 +14,10 @@
1414
import com.databricks.jdbc.dbclient.IDatabricksMetadataClient;
1515
import com.databricks.jdbc.dbclient.impl.common.CommandConstants;
1616
import com.databricks.jdbc.dbclient.impl.common.MetadataResultSetBuilder;
17+
import com.databricks.jdbc.exception.DatabricksSQLException;
1718
import com.databricks.jdbc.log.JdbcLogger;
1819
import com.databricks.jdbc.log.JdbcLoggerFactory;
20+
import com.databricks.jdbc.model.telemetry.enums.DatabricksDriverErrorCode;
1921
import java.sql.ResultSet;
2022
import java.sql.ResultSetMetaData;
2123
import java.sql.SQLException;
@@ -324,19 +326,6 @@ public DatabricksResultSet listPrimaryKeys(
324326
catalog = autoFillCatalog(catalog, currentCatalog);
325327

326328
String[] resolvedParams = resolveKeyBasedParams(catalog, schema, table, session);
327-
if (resolvedParams == null) {
328-
LOGGER.debug(
329-
"Could not resolve key-based params (catalog={}, schema={}, table={}), returning empty result set for listPrimaryKeys",
330-
catalog,
331-
schema,
332-
table);
333-
return metadataResultSetBuilder.getResultSetWithGivenRowsAndColumns(
334-
PRIMARY_KEYS_COLUMNS,
335-
new ArrayList<>(),
336-
METADATA_STATEMENT_ID,
337-
com.databricks.jdbc.common.CommandName.LIST_PRIMARY_KEYS);
338-
}
339-
340329
String resolvedCatalog = resolvedParams[0];
341330
String resolvedSchema = resolvedParams[1];
342331
String resolvedTable = resolvedParams[2];
@@ -373,19 +362,6 @@ public DatabricksResultSet listImportedKeys(
373362
catalog = autoFillCatalog(catalog, currentCatalog);
374363

375364
String[] resolvedParams = resolveKeyBasedParams(catalog, schema, table, session);
376-
if (resolvedParams == null) {
377-
LOGGER.debug(
378-
"Could not resolve key-based params (catalog={}, schema={}, table={}), returning empty result set for listImportedKeys",
379-
catalog,
380-
schema,
381-
table);
382-
return metadataResultSetBuilder.getResultSetWithGivenRowsAndColumns(
383-
IMPORTED_KEYS_COLUMNS,
384-
new ArrayList<>(),
385-
METADATA_STATEMENT_ID,
386-
com.databricks.jdbc.common.CommandName.GET_IMPORTED_KEYS);
387-
}
388-
389365
String resolvedCatalog = resolvedParams[0];
390366
String resolvedSchema = resolvedParams[1];
391367
String resolvedTable = resolvedParams[2];
@@ -414,6 +390,12 @@ public DatabricksResultSet listExportedKeys(
414390
IDatabricksSession session, String catalog, String schema, String table) throws SQLException {
415391
LOGGER.debug("public ResultSet listExportedKeys() using SDK");
416392

393+
if (table == null) {
394+
LOGGER.debug("listExportedKeys: table is null, throwing");
395+
throw new DatabricksSQLException(
396+
"Invalid argument: tableName may not be null", DatabricksDriverErrorCode.INVALID_STATE);
397+
}
398+
417399
// Only fetch currentCatalog if multiple catalog support is disabled
418400
String currentCatalog = isMultipleCatalogSupportDisabled() ? session.getCurrentCatalog() : null;
419401
if (!metadataResultSetBuilder.shouldAllowCatalogAccess(catalog, currentCatalog, session)) {
@@ -438,6 +420,12 @@ public DatabricksResultSet listCrossReferences(
438420
throws SQLException {
439421
LOGGER.debug("public ResultSet listCrossReferences() using SDK");
440422

423+
// Null foreignTable means "unspecified" — Thrift server returns empty ResultSet
424+
if (foreignTable == null) {
425+
LOGGER.debug("listCrossReferences: foreignTable is null, returning empty result set");
426+
return metadataResultSetBuilder.getCrossRefsResult(new ArrayList<>());
427+
}
428+
441429
// Only fetch currentCatalog if multiple catalog support is disabled
442430
String currentCatalog = isMultipleCatalogSupportDisabled() ? session.getCurrentCatalog() : null;
443431
if (!metadataResultSetBuilder.shouldAllowCatalogAccess(parentCatalog, currentCatalog, session)
@@ -446,29 +434,12 @@ public DatabricksResultSet listCrossReferences(
446434
return metadataResultSetBuilder.getCrossRefsResult(new ArrayList<>());
447435
}
448436

449-
// Resolve null params for the foreign side (used to build the SQL query)
437+
// Resolve null catalog/schema for the foreign side (used to build the SQL query)
450438
String[] resolvedForeignParams =
451439
resolveKeyBasedParams(foreignCatalog, foreignSchema, foreignTable, session);
452-
if (resolvedForeignParams == null) {
453-
LOGGER.debug(
454-
"Could not resolve foreign key-based params (catalog={}, schema={}, table={}), returning empty result set",
455-
foreignCatalog,
456-
foreignSchema,
457-
foreignTable);
458-
return metadataResultSetBuilder.getCrossRefsResult(new ArrayList<>());
459-
}
460-
461-
// Resolve null params for the parent side (used for filtering results)
440+
// Resolve null catalog/schema for the parent side (used for filtering results)
462441
String[] resolvedParentParams =
463442
resolveKeyBasedParams(parentCatalog, parentSchema, parentTable, session);
464-
if (resolvedParentParams == null) {
465-
LOGGER.debug(
466-
"Could not resolve parent key-based params (catalog={}, schema={}, table={}), returning empty result set",
467-
parentCatalog,
468-
parentSchema,
469-
parentTable);
470-
return metadataResultSetBuilder.getCrossRefsResult(new ArrayList<>());
471-
}
472443

473444
String resolvedForeignCatalog = resolvedForeignParams[0];
474445
String resolvedForeignSchema = resolvedForeignParams[1];
@@ -542,15 +513,21 @@ private String autoFillCatalog(String catalog, String currentCatalog) {
542513
}
543514

544515
/**
545-
* Resolves null catalog/schema for key-based metadata operations to match Thrift server behavior.
546-
* When catalog is null, it is replaced with current_catalog and (if schema is also null) schema
547-
* is replaced with current_schema. Returns null if the caller should return an empty result set
548-
* (table is null, schema is null without catalog also being null, or any resolved value is null).
516+
* Validates and resolves null catalog/schema/table for key-based metadata operations to match
517+
* Thrift server behavior. Throws DatabricksSQLException for invalid parameter combinations
518+
* (matching Thrift error behavior). When catalog is null, it is replaced with current_catalog and
519+
* (if schema is also null) schema is replaced with current_schema.
520+
*
521+
* @throws DatabricksSQLException if table is null/empty, or schema is null with an explicit
522+
* catalog
549523
*/
550524
private String[] resolveKeyBasedParams(
551525
String catalog, String schema, String table, IDatabricksSession session) throws SQLException {
552-
if (table == null) {
553-
return null;
526+
if (table == null || table.isEmpty()) {
527+
LOGGER.debug("resolveKeyBasedParams: table is null or empty, throwing");
528+
throw new DatabricksSQLException(
529+
"Invalid argument: tableName may not be null or empty",
530+
DatabricksDriverErrorCode.INVALID_STATE);
554531
}
555532

556533
if (catalog == null) {
@@ -560,11 +537,22 @@ private String[] resolveKeyBasedParams(
560537
schema = currentCatalogAndSchema[1];
561538
}
562539
} else if (schema == null) {
563-
return null;
540+
LOGGER.debug(
541+
"resolveKeyBasedParams: schema is null with explicit catalog '{}', throwing", catalog);
542+
throw new DatabricksSQLException(
543+
"Invalid argument: schema may not be null when catalog is specified",
544+
DatabricksDriverErrorCode.INVALID_STATE);
564545
}
565546

547+
// Safety net: getCurrentCatalogAndSchema() returned null values
566548
if (catalog == null || schema == null) {
567-
return null;
549+
LOGGER.debug(
550+
"resolveKeyBasedParams: could not resolve catalog or schema (catalog={}, schema={})",
551+
catalog,
552+
schema);
553+
throw new DatabricksSQLException(
554+
"Invalid argument: could not resolve catalog or schema",
555+
DatabricksDriverErrorCode.INVALID_STATE);
568556
}
569557

570558
return new String[] {catalog, schema, table};

src/test/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksMetadataQueryClientTest.java

Lines changed: 63 additions & 31 deletions
Original file line numberDiff line numberDiff line change
@@ -759,9 +759,8 @@ void testListCrossReferences() throws Exception {
759759
}
760760

761761
/**
762-
* Tests that getCrossReference returns empty result set (not an exception) when all three
763-
* foreign-side parameters are null. Matches Thrift server behavior where null foreign table
764-
* delegates to getExportedKeys which returns empty in DBSQL.
762+
* Tests that getCrossReference returns empty result set (not an exception) when foreign table is
763+
* null. Matches Thrift server behavior where null table means "unspecified" and returns empty.
765764
*/
766765
@Test
767766
void testListCrossReferences_allForeignParamsNull_returnsEmpty() throws Exception {
@@ -770,7 +769,22 @@ void testListCrossReferences_allForeignParamsNull_returnsEmpty() throws Exceptio
770769
DatabricksResultSet result =
771770
metadataClient.listCrossReferences(
772771
session, TEST_CATALOG, TEST_SCHEMA, TEST_TABLE, null, null, null);
773-
assertFalse(result.next(), "Should return empty when all foreign params are null, not throw");
772+
assertFalse(result.next(), "Should return empty when foreign table is null");
773+
}
774+
775+
/**
776+
* Tests that getCrossReference returns empty result set when both parent and foreign tables are
777+
* null. Thrift server requires parentTable, but that null check lives at the
778+
* DatabricksDatabaseMetaData layer. At this layer, null parentTable with null foreignTable
779+
* returns empty since foreignTable == null triggers the early return.
780+
*/
781+
@Test
782+
void testListCrossReferences_bothTablesNull_returnsEmpty() throws Exception {
783+
DatabricksMetadataQueryClient metadataClient = new DatabricksMetadataQueryClient(mockClient);
784+
785+
DatabricksResultSet result =
786+
metadataClient.listCrossReferences(session, null, null, null, null, null, null);
787+
assertFalse(result.next(), "Should return empty when both tables are null");
774788
}
775789

776790
@Test
@@ -943,40 +957,58 @@ void testListFunctionsWithNullCatalog() throws SQLException {
943957
}
944958

945959
@Test
946-
void testKeyBasedOpsReturnEmptyForNullTable() throws SQLException {
960+
void testKeyBasedOpsThrowForNullTable() {
947961
DatabricksMetadataQueryClient metadataClient = new DatabricksMetadataQueryClient(mockClient);
948962

949-
// null table should return empty for listPrimaryKeys
950-
DatabricksResultSet pkResult =
951-
metadataClient.listPrimaryKeys(session, TEST_CATALOG, TEST_SCHEMA, null);
952-
assertNotNull(pkResult);
953-
assertFalse(pkResult.next(), "Expected empty result set for listPrimaryKeys with null table");
954-
955-
// null table should return empty for listImportedKeys
956-
DatabricksResultSet ikResult =
957-
metadataClient.listImportedKeys(session, TEST_CATALOG, TEST_SCHEMA, null);
958-
assertNotNull(ikResult);
959-
assertFalse(ikResult.next(), "Expected empty result set for listImportedKeys with null table");
963+
assertThrows(
964+
DatabricksSQLException.class,
965+
() -> metadataClient.listPrimaryKeys(session, TEST_CATALOG, TEST_SCHEMA, null),
966+
"listPrimaryKeys should throw for null table");
967+
968+
assertThrows(
969+
DatabricksSQLException.class,
970+
() -> metadataClient.listImportedKeys(session, TEST_CATALOG, TEST_SCHEMA, null),
971+
"listImportedKeys should throw for null table");
960972
}
961973

962974
@Test
963-
void testKeyBasedOpsReturnEmptyForNullSchemaWithExplicitCatalog() throws SQLException {
975+
void testKeyBasedOpsThrowForEmptyTable() {
964976
DatabricksMetadataQueryClient metadataClient = new DatabricksMetadataQueryClient(mockClient);
965977

966-
// schema=null with explicit catalog should return empty (matching Thrift behavior)
967-
DatabricksResultSet pkResult =
968-
metadataClient.listPrimaryKeys(session, "any_catalog", null, TEST_TABLE);
969-
assertNotNull(pkResult);
970-
assertFalse(
971-
pkResult.next(),
972-
"Expected empty result set for listPrimaryKeys with null schema and explicit catalog");
973-
974-
DatabricksResultSet ikResult =
975-
metadataClient.listImportedKeys(session, "any_catalog", null, TEST_TABLE);
976-
assertNotNull(ikResult);
977-
assertFalse(
978-
ikResult.next(),
979-
"Expected empty result set for listImportedKeys with null schema and explicit catalog");
978+
assertThrows(
979+
DatabricksSQLException.class,
980+
() -> metadataClient.listPrimaryKeys(session, TEST_CATALOG, TEST_SCHEMA, ""),
981+
"listPrimaryKeys should throw for empty table");
982+
983+
assertThrows(
984+
DatabricksSQLException.class,
985+
() -> metadataClient.listImportedKeys(session, TEST_CATALOG, TEST_SCHEMA, ""),
986+
"listImportedKeys should throw for empty table");
987+
}
988+
989+
@Test
990+
void testKeyBasedOpsThrowForNullSchemaWithExplicitCatalog() {
991+
DatabricksMetadataQueryClient metadataClient = new DatabricksMetadataQueryClient(mockClient);
992+
993+
assertThrows(
994+
DatabricksSQLException.class,
995+
() -> metadataClient.listPrimaryKeys(session, "any_catalog", null, TEST_TABLE),
996+
"listPrimaryKeys should throw for null schema with explicit catalog");
997+
998+
assertThrows(
999+
DatabricksSQLException.class,
1000+
() -> metadataClient.listImportedKeys(session, "any_catalog", null, TEST_TABLE),
1001+
"listImportedKeys should throw for null schema with explicit catalog");
1002+
}
1003+
1004+
@Test
1005+
void testExportedKeysThrowsForNullTable() {
1006+
DatabricksMetadataQueryClient metadataClient = new DatabricksMetadataQueryClient(mockClient);
1007+
1008+
assertThrows(
1009+
DatabricksSQLException.class,
1010+
() -> metadataClient.listExportedKeys(session, TEST_CATALOG, TEST_SCHEMA, null),
1011+
"listExportedKeys should throw for null table");
9801012
}
9811013

9821014
@Test

0 commit comments

Comments
 (0)