Skip to content

Commit 9f6cfc9

Browse files
committed
add unit test for _read_databricks_secret
1 parent 7dc14a1 commit 9f6cfc9

File tree

1 file changed

+34
-50
lines changed

1 file changed

+34
-50
lines changed
Lines changed: 34 additions & 50 deletions
Original file line numberDiff line numberDiff line change
@@ -1,12 +1,20 @@
1+
import logging
2+
import pytest
3+
14
from unittest.mock import MagicMock, create_autospec
25

36
from databricks.sdk import WorkspaceClient
7+
from databricks.sdk.errors import (
8+
InternalError,
9+
ResourceDoesNotExist
10+
)
411
from databricks.sdk.service.catalog import (
512
AwsIamRole,
613
AzureManagedIdentity,
714
AzureServicePrincipal,
815
StorageCredentialInfo,
916
)
17+
from databricks.sdk.service.workspace import GetSecretResponse
1018

1119
from databricks.labs.ucx.assessment.azure import StoragePermissionMapping
1220
from databricks.labs.ucx.migration.azure_credentials import (
@@ -41,53 +49,29 @@ def test_list_storage_credentials():
4149
assert expected == sp_migration._list_storage_credentials()
4250

4351

44-
def test_sp_in_storage_credentials():
    """Service principals whose client_id already appears in the storage-credential
    app-id set must be filtered out; only the unmatched one should remain.
    """
    known_app_ids = {"no_match_id_1", "client_id_1", "client_id_2"}

    def make_mapping(suffix: str, privilege: str) -> StoragePermissionMapping:
        # Helper: every field carries the same numeric suffix so the fixture
        # values are identical to spelling each mapping out by hand.
        return StoragePermissionMapping(
            prefix=f"prefix{suffix}",
            client_id=f"client_id_{suffix}",
            principal=f"principal_{suffix}",
            privilege=privilege,
            directory_id=f"directory_id_{suffix}",
        )

    unmatched = make_mapping("3", "READ_FILES")
    candidates = [
        unmatched,
        make_mapping("1", "WRITE_FILES"),
        make_mapping("2", "WRITE_FILES"),
    ]

    migration = AzureServicePrincipalMigration(MagicMock(), MagicMock(), MagicMock(), MagicMock())

    survivors = migration._check_sp_in_storage_credentials(candidates, known_app_ids)

    assert survivors == [unmatched]
75-
76-
77-
def test_sp_with_empty_storage_credentials():
    """When there are no existing storage-credential app ids, every service
    principal should pass through the filter unchanged.
    """
    # Fix: the original `{}` literal creates an empty *dict*, not an empty set.
    # Membership tests happen to behave the same, but the sibling test passes a
    # set here, so use set() for type consistency with that contract.
    storage_credentials_app_ids: set[str] = set()

    sp_no_match1 = StoragePermissionMapping(
        prefix="prefix3",
        client_id="client_id_3",
        principal="principal_3",
        privilege="READ_FILES",
        directory_id="directory_id_3",
    )
    service_principals = [sp_no_match1]

    sp_migration = AzureServicePrincipalMigration(MagicMock(), MagicMock(), MagicMock(), MagicMock())

    filtered_sp_list = sp_migration._check_sp_in_storage_credentials(service_principals, storage_credentials_app_ids)

    # Nothing matches an empty id set, so the single SP is retained.
    assert filtered_sp_list == [sp_no_match1]
52+
@pytest.mark.parametrize(
    "secret_bytes_value, expected_return",
    [
        # "aGVsbG8gd29ybGQ=" is base64 for the UTF-8 text "hello world".
        (GetSecretResponse(value="aGVsbG8gd29ybGQ="), "hello world"),
        # "T2zhLCBNdW5kbyE=" decodes to bytes that are not valid UTF-8,
        # so the helper is expected to give back None.
        (GetSecretResponse(value="T2zhLCBNdW5kbyE="), None),
    ],
)
def test_read_secret_value_decode(secret_bytes_value, expected_return):
    """_read_databricks_secret should base64/UTF-8 decode the secret payload,
    returning the text on success and None when decoding fails.
    """
    ws = create_autospec(WorkspaceClient)
    ws.secrets.get_secret.return_value = secret_bytes_value

    migration = AzureServicePrincipalMigration(MagicMock(), ws, MagicMock(), MagicMock())

    result = migration._read_databricks_secret("test_scope", "test_key", "000")
    assert result == expected_return
62+
63+
64+
@pytest.mark.parametrize(
    "exception, expected_log, expected_return",
    [
        # Both "secret is gone" and "backend blew up" are treated the same:
        # log that the client_secret will not be reused and return None.
        (ResourceDoesNotExist(), "Will not reuse this client_secret", None),
        (InternalError(), "Will not reuse this client_secret", None),
    ],
)
def test_read_secret_read_exception(caplog, exception, expected_log, expected_return):
    """When the secrets API raises, _read_databricks_secret should log an INFO
    message about not reusing the secret and return None instead of raising.
    """
    caplog.set_level(logging.INFO)
    ws = create_autospec(WorkspaceClient)
    ws.secrets.get_secret.side_effect = exception

    migration = AzureServicePrincipalMigration(MagicMock(), ws, MagicMock(), MagicMock())

    result = migration._read_databricks_secret("test_scope", "test_key", "000")

    assert expected_log in caplog.text
    assert result == expected_return

0 commit comments

Comments
 (0)