Skip to content

Commit 5674241

Browse files
committed
Reverting the changes that caused the integration test failures.
1 parent 84a3f6e commit 5674241

File tree

5 files changed

+14
-23
lines changed

5 files changed

+14
-23
lines changed

src/databricks/labs/ucx/cli.py

Lines changed: 1 addition & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -98,10 +98,7 @@ def validate_external_locations():
9898
sql_backend = StatementExecutionBackend(ws, installation.config.warehouse_id)
9999
location_crawler = ExternalLocations(ws, sql_backend, installation.config.inventory_database)
100100
path = location_crawler.save_as_terraform_definitions_on_workspace()
101-
if (
102-
len(path) > 0
103-
and prompts.confirm(f"external_locations.tf file written to {path}. Do you want to open it?") == "Yes"
104-
):
101+
if len(path) > 0 and prompts.confirm(f"external_locations.tf file written to {path}. Do you want to open it?"):
105102
webbrowser.open(f"{ws.config.host}/#workspace{path}")
106103

107104

src/databricks/labs/ucx/install.py

Lines changed: 6 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -139,7 +139,6 @@ def __init__(
139139
promtps: Prompts | None = None,
140140
wheels: Wheels | None = None,
141141
sql_backend: SqlBackend | None = None,
142-
skip_prompts: bool = False,
143142
):
144143
if "DATABRICKS_RUNTIME_VERSION" in os.environ:
145144
msg = "WorkspaceInstaller is not supposed to be executed in Databricks Runtime"
@@ -157,7 +156,6 @@ def __init__(
157156
self._dashboards: dict[str, str] = {}
158157
self._state = InstallState(ws, self._install_folder)
159158
self._install_override_clusters = None
160-
self._skip_prompts = skip_prompts
161159

162160
def run(self):
163161
logger.info(f"Installing UCX v{self._wheels.version()}")
@@ -193,15 +191,15 @@ def _install_spark_config_for_hms_lineage(self):
193191
if gscript.enabled:
194192
logger.info("Already exists and enabled. Skipped creating a new one.")
195193
elif not gscript.enabled:
196-
if self._skip_prompts or self._prompts.confirm(
194+
if self._prompts.confirm(
197195
"Your Global Init Script with required spark config is disabled, Do you want to enable it?"
198196
):
199197
logger.info("Enabling Global Init Script...")
200198
hms_lineage.enable_global_init_script(gscript)
201199
else:
202200
logger.info("No change to Global Init Script is made.")
203201
elif not gscript:
204-
if self._skip_prompts or self._prompts.confirm(
202+
if self._prompts.confirm(
205203
"No Global Init Script with Required Spark Config exists, Do you want to create one?"
206204
):
207205
logger.info("Creating Global Init Script...")
@@ -217,10 +215,9 @@ def run_for_config(
217215
wheels: Wheels | None = None,
218216
override_clusters: dict[str, str] | None = None,
219217
sql_backend: SqlBackend | None = None,
220-
skip_prompts: bool = False,
221218
) -> "WorkspaceInstaller":
222219
workspace_installer = WorkspaceInstaller(
223-
ws, prefix=prefix, promtps=promtps, wheels=wheels, sql_backend=sql_backend, skip_prompts=skip_prompts
220+
ws, prefix=prefix, promtps=promtps, wheels=wheels, sql_backend=sql_backend
224221
)
225222
logger.info(f"Installing UCX v{workspace_installer._wheels.version()} on {ws.config.host}")
226223
workspace_installer._config = config
@@ -555,7 +552,7 @@ def _create_readme(self):
555552
self._ws.workspace.upload(path, intro.encode("utf8"), overwrite=True)
556553
url = self.notebook_link(path)
557554
logger.info(f"Created README notebook with job overview: {url}")
558-
if self._skip_prompts or self._prompts.confirm("Open job overview in README notebook in your home directory?"):
555+
if self._prompts.confirm("Open job overview in README notebook in your home directory?"):
559556
webbrowser.open(url)
560557

561558
def _replace_inventory_variable(self, text: str) -> str:
@@ -803,7 +800,7 @@ def latest_job_status(self) -> list[dict]:
803800
return latest_status
804801

805802
def uninstall(self):
806-
if self._skip_prompts or self._prompts.confirm(
803+
if self._prompts.confirm(
807804
"Do you want to uninstall ucx from the workspace too, this would "
808805
"remove ucx project folder, dashboards, queries and jobs"
809806
):
@@ -825,7 +822,7 @@ def uninstall(self):
825822
logger.info("UnInstalling UCX complete")
826823

827824
def _remove_database(self):
828-
if self._skip_prompts or self._prompts.confirm(
825+
if self._prompts.confirm(
829826
f"Do you want to delete the inventory database {self._current_config.inventory_database} too?"
830827
):
831828
logger.info(f"Deleting inventory database {self._current_config.inventory_database}")

tests/integration/assessment/test_azure.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -26,8 +26,8 @@ def test_spn_crawler(ws, inventory_schema, make_job, make_pipeline, sql_backend)
2626
results.append(spn)
2727

2828
assert len(results) >= 2
29-
assert results[1].storage_account == _TEST_STORAGE_ACCOUNT
30-
assert results[1].tenant_id == _TEST_TENANT_ID
29+
assert results[0].storage_account == _TEST_STORAGE_ACCOUNT
30+
assert results[0].tenant_id == _TEST_TENANT_ID
3131

3232

3333
@retried(on=[NotFound], timeout=timedelta(minutes=5))

tests/integration/test_installation.py

Lines changed: 0 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -40,7 +40,6 @@ def test_job_failure_propagates_correct_error_message_and_logs(ws, sql_backend,
4040
"main": default_cluster_id,
4141
"tacl": tacl_cluster_id,
4242
},
43-
skip_prompts=True,
4443
)
4544

4645
sql_backend.execute(f"DROP SCHEMA {inventory_database} CASCADE")
@@ -157,7 +156,6 @@ def test_jobs_with_no_inventory_database(
157156
"main": default_cluster_id,
158157
"tacl": tacl_cluster_id,
159158
},
160-
skip_prompts=True,
161159
)
162160

163161
try:
@@ -262,7 +260,6 @@ def test_uninstallation(
262260
"main": default_cluster_id,
263261
"tacl": tacl_cluster_id,
264262
},
265-
skip_prompts=True,
266263
)
267264
install_folder = install._install_folder
268265
assessment_job_id = install._state.jobs["assessment"]

tests/integration/workspace_access/test_tacl.py

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,7 @@ def test_permission_for_files_anonymous_func(sql_backend, inventory_schema, make
1717
group_d = make_group()
1818

1919
sql_backend.execute(f"GRANT READ_METADATA ON ANY FILE TO `{group_a.display_name}`")
20-
sql_backend.execute(f"GRANT SELECT ON ANONYMOUS FUNCTION TO `{group_b.display_name}`")
20+
sql_backend.execute(f"GRANT SELECT ON ANONYMOUS FUNCTION TO `{group_c.display_name}`")
2121

2222
tables = StaticTablesCrawler(sql_backend, inventory_schema, [])
2323
grants = GrantsCrawler(tables)
@@ -35,17 +35,17 @@ def test_permission_for_files_anonymous_func(sql_backend, inventory_schema, make
3535
for any_file_grant in grants._grants(any_file=True):
3636
any_file_actual[any_file_grant.principal] = any_file_grant.action_type
3737

38-
assert group_c.display_name in any_file_actual
39-
assert any_file_actual[group_c.display_name] == "READ_METADATA"
40-
assert any_file_actual[group_a.display_name] == any_file_actual[group_c.display_name]
38+
assert group_b.display_name in any_file_actual
39+
assert any_file_actual[group_b.display_name] == "READ_METADATA"
40+
assert any_file_actual[group_a.display_name] == any_file_actual[group_b.display_name]
4141

4242
anonymous_function_actual = {}
4343
for ano_func_grant in grants._grants(anonymous_function=True):
4444
anonymous_function_actual[ano_func_grant.principal] = ano_func_grant.action_type
4545

4646
assert group_d.display_name in anonymous_function_actual
4747
assert anonymous_function_actual[group_d.display_name] == "SELECT"
48-
assert anonymous_function_actual[group_b.display_name] == anonymous_function_actual[group_d.display_name]
48+
assert anonymous_function_actual[group_c.display_name] == anonymous_function_actual[group_d.display_name]
4949

5050

5151
def test_owner_permissions_for_tables_and_schemas(sql_backend, inventory_schema, make_schema, make_table, make_group):

0 commit comments

Comments
 (0)