The following example shows how to create a DeltaLake DATALAKE that uses a Databricks-managed service principal for the catalog connection and an Azure IDP-managed service principal for storage access.
Define the authorization for catalog access:
-- Authorization object for the Unity catalog connection; credentials are the
-- Databricks service principal's client ID and secret (placeholders below).
CREATE AUTHORIZATION simplified_azure_dbx_catalog_unity
USER '<<dbx_client_id>>'
PASSWORD '<<dbx_client_secret_key>>';
Define the authorization for storage access:
-- Authorization object for ADLS storage access; credentials are the Azure AD
-- (Entra ID) service principal's client ID and secret (placeholders below).
CREATE AUTHORIZATION simplified_azure_idp_auth_unity
USER '<<azure_ad_service_principal_client_id>>'
PASSWORD '<<azure_ad_service_principal_secret_key>>';
Create a DeltaLake DATALAKE object referencing the two AUTH objects:
-- DATALAKE object tying together the two AUTHORIZATION objects: one for the
-- Unity Catalog connection, one for Azure storage access.
CREATE DATALAKE deltawrite_simplified_unity_02
EXTERNAL SECURITY CATALOG simplified_azure_dbx_catalog_unity,
EXTERNAL SECURITY STORAGE simplified_azure_idp_auth_unity
USING
-- Unity Catalog connection: catalog type, workspace API endpoint, catalog name.
catalog_type ('unity')
catalog_location ('https://adb-8074943983864086.6.azuredatabricks.net/api')
unity_catalog_name ('deltalake_test')
-- Azure storage/compute details: storage account, AAD tenant, and the
-- Databricks cluster used when no cluster is specified at query time.
storage_account_name ('regicebergstorageacct')
tenant_id ('391c8c4c-6a2a-40fd-ab98-226b6baa5155')
default_cluster_id ('0210-232334-ab0q59t3')
-- ABFSS root the table data lives under, plus its container and region.
-- NOTE(review): storage_location points at account 'icebergstorageeastus2',
-- but storage_account_name above is 'regicebergstorageacct' -- confirm which
-- account is intended; they should normally agree.
storage_location ('abfss://otf-330spark-51hdi-publ-2024-06-13t06-52-39-186z@icebergstorageeastus2.dfs.core.windows.net/')
container_name('otf-330spark-51hdi-publ-2024-06-13t06-52-39-186z')
storage_region('East US 2')
-- NOTE(review): reader is 'iceberg_generic_reader' while TABLE FORMAT is
-- DELTALAKE -- confirm this reader is the intended one for Delta tables.
data_reader_type ('iceberg_generic_reader')
TABLE FORMAT DELTALAKE;