src/databricks/labs/ucx/hive_metastore (1 file changed: 0 additions, 32 deletions)

@@ -424,38 +424,6 @@ def _deduplicate_mounts(mounts: list) -> list:
             deduplicated_mounts.append(obj)
         return deduplicated_mounts
 
-    @cached_property
-    def _jvm(self):
-        # pylint: disable=import-error,import-outside-toplevel,broad-exception-caught
-        try:
-            from pyspark.sql.session import SparkSession  # type: ignore[import-not-found]
-
-            spark = SparkSession.builder.getOrCreate()
-            return spark._jvm  # pylint: disable=protected-access
-        except Exception as err:
-            logger.warning(f"Cannot create Py4j proxy: {err}")
-            return None
-
-    def _resolve_dbfs_root(self) -> Mount | None:
-        # TODO: Consider deprecating this method and rely on the new API call
-        # pylint: disable=broad-exception-caught,too-many-try-statements
-        try:
-            jvm = self._jvm
-            if not jvm:
-                return None
-            uri = jvm.java.net.URI
-            some = jvm.scala.Some
-            hms_fed_dbfs_utils = jvm.com.databricks.sql.managedcatalog.connections.HmsFedDbfsUtils
-            root_location_opt = hms_fed_dbfs_utils.resolveDbfsPath(some(uri("dbfs:/user/hive/warehouse")))
-            if root_location_opt.isDefined():
-                source: str = root_location_opt.get().toString()
-                source = source.removesuffix('user/hive/warehouse')
-                return Mount("/", source)
-            return None
-        except Exception as err:
-            logger.warning(f"Failed to resolve DBFS root location: {err}")
-            return None
-
     def _crawl(self) -> Iterable[Mount]:
         mounts = []
         try:
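For context, the removed _jvm helper relied on the Py4J bridge that PySpark exposes through spark._jvm: any JVM class on the driver classpath can be addressed by its fully qualified name and called from Python. Below is a minimal sketch of that pattern, assuming it runs on a Spark driver where pyspark is importable; the HmsFedDbfsUtils call from the removed code is a Databricks-internal class and is not reproduced here.

# Sketch of the Py4J access pattern used by the removed helpers.
# Assumes an active Spark session; spark._jvm is a Py4J gateway and not a public API.
from pyspark.sql.session import SparkSession

spark = SparkSession.builder.getOrCreate()
jvm = spark._jvm  # pylint: disable=protected-access

# JVM classes are reached by their dotted package path; calling one invokes its constructor.
uri = jvm.java.net.URI("dbfs:/user/hive/warehouse")
print(uri.getScheme())  # -> "dbfs"
print(uri.getPath())    # -> "/user/hive/warehouse"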