Skip to content

Commit a84381b

Browse files
committed
Updated test with new crawler
1 parent b0e82a0 commit a84381b

File tree

2 files changed

+8
-8
lines changed

2 files changed

+8
-8
lines changed

src/databricks/labs/ucx/framework/crawlers.py

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -22,10 +22,10 @@ def fetch(self, sql) -> Iterator[any]:
2222
raise NotImplementedError
2323

2424
@abstractmethod
25-
def save_table(self, full_name: str, rows: list[any], klass: type, mode: str = "append"):
25+
def save_table(self, full_name: str, rows: list[any], klass: dataclasses.dataclass, mode: str = "append"):
2626
raise NotImplementedError
2727

28-
def create_table(self, full_name: str, klass: type):
28+
def create_table(self, full_name: str, klass: dataclasses.dataclass):
2929
ddl = f"CREATE TABLE IF NOT EXISTS {full_name} ({self._schema_for(klass)}) USING DELTA"
3030
self.execute(ddl)
3131

@@ -86,7 +86,7 @@ def fetch(self, sql) -> Iterator[any]:
8686
logger.debug(f"[api][fetch] {sql}")
8787
return self._sql.execute_fetch_all(self._warehouse_id, sql)
8888

89-
def save_table(self, full_name: str, rows: list[any], klass: type, mode="append"):
89+
def save_table(self, full_name: str, rows: list[any], klass: dataclasses.dataclass, mode="append"):
9090
if mode == "overwrite":
9191
msg = "Overwrite mode is not yet supported"
9292
raise NotImplementedError(msg)
@@ -140,7 +140,7 @@ def fetch(self, sql) -> Iterator[any]:
140140
logger.debug(f"[spark][fetch] {sql}")
141141
return self._spark.sql(sql).collect()
142142

143-
def save_table(self, full_name: str, rows: list[any], klass: type, mode: str = "append"):
143+
def save_table(self, full_name: str, rows: list[any], klass: dataclasses.dataclass, mode: str = "append"):
144144
rows = self._filter_none_rows(rows, full_name)
145145

146146
if len(rows) == 0:
@@ -152,7 +152,7 @@ def save_table(self, full_name: str, rows: list[any], klass: type, mode: str = "
152152

153153

154154
class CrawlerBase:
155-
def __init__(self, backend: SqlBackend, catalog: str, schema: str, table: str, klass: type):
155+
def __init__(self, backend: SqlBackend, catalog: str, schema: str, table: str, klass: dataclasses.dataclass):
156156
"""
157157
Initializes a CrawlerBase instance.
158158

tests/integration/hive_metastore/test_external_locations.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@
99
logger = logging.getLogger(__name__)
1010

1111

12-
def test_table_inventory(ws, make_warehouse, make_schema):
12+
def test_external_locations(ws, make_warehouse, make_schema):
1313
warehouse_id = os.environ["TEST_DEFAULT_WAREHOUSE_ID"]
1414

1515
logger.info("setting up fixtures")
@@ -20,8 +20,8 @@ def test_table_inventory(ws, make_warehouse, make_schema):
2020
Table("hive_metastore", "foo", "bar", "EXTERNAL", "delta", location="dbfs:/mnt/foo/test3/table3"),
2121
]
2222
schema = make_schema()
23-
sbe.save_table(f"{schema}.tables", tables)
24-
sbe.save_table(f"{schema}.mounts", [Mount("/mnt/foo", "s3://bar")])
23+
sbe.save_table(f"{schema}.tables", tables, Table)
24+
sbe.save_table(f"{schema}.mounts", [Mount("/mnt/foo", "s3://bar")], Mount)
2525

2626
crawler = ExternalLocationCrawler(ws, sbe, schema.split(".")[1])
2727
results = crawler.snapshot()

0 commit comments

Comments (0)