migration/bring-your-own-s3-tables/bring_your_own_s3_table_bucket.py
import json

def _add_lf_admin(lf_client, account_id, execute_flag):
    # Fetch the current Lake Formation data lake settings so the admin lists can be inspected.
    data_lake_settings = lf_client.get_data_lake_settings()
    print("Checking current Data lake administrators:")
    print(f"{json.dumps(data_lake_settings, indent=2, sort_keys=True)}\n")

    redshift_principal = f"arn:aws:iam::{account_id}:role/aws-service-role/redshift.amazonaws.com/AWSServiceRoleForRedshift"
    if execute_flag:
        # Ensure the ReadOnlyAdmins list exists, then check whether the principal is already in it
        read_only_admins = data_lake_settings['DataLakeSettings'].setdefault('ReadOnlyAdmins', [])
        principal_exists = any(
            admin.get('DataLakePrincipalIdentifier') == redshift_principal
            for admin in read_only_admins
        )
        if not principal_exists:
            read_only_admins.append({'DataLakePrincipalIdentifier': redshift_principal})
            lf_client.put_data_lake_settings(
                DataLakeSettings=data_lake_settings['DataLakeSettings']
            )
            print("Successfully added AWSServiceRoleForRedshift role as a Data lake ReadOnlyAdmin\n")
        else:
            print("AWSServiceRoleForRedshift role is already a Data lake ReadOnlyAdmin, nothing to update\n")
    else:
        print("Skipped adding AWSServiceRoleForRedshift role as a Data lake ReadOnlyAdmin; set the --execute flag to True to perform the actual update\n")