in Administrative_Dashboard/lambda_functions/data_prepare/data_prepare.py [0:0]
# NOTE: these imports cover only what this handler itself uses; the QuickSight helper
# functions (list_users, list_user_groups, list_dashboards, describe_* etc.) are
# assumed to be defined elsewhere in this module.
import csv
import os
import tempfile

import boto3


def lambda_handler(event, context):
    # The region comes from the triggering event; the account ID is resolved via STS.
    aws_region = str(event['detail']['awsRegion'])
    sts_client = boto3.client("sts", region_name=aws_region)
    account_id = sts_client.get_caller_identity()["Account"]
    # S3 bucket that stores the monitoring CSVs, the two object keys they are
    # uploaded to, and the local temp paths used to stage them.
    s3 = boto3.resource('s3')
    bucketname = 'administrative-dashboard' + account_id
    bucket = s3.Bucket(bucketname)
    key = 'monitoring/quicksight/group_membership/group_membership.csv'
    key2 = 'monitoring/quicksight/object_access/object_access.csv'
    tmpdir = tempfile.mkdtemp()
    local_file_name = 'group_membership.csv'
    local_file_name2 = 'object_access.csv'
    path = os.path.join(tmpdir, local_file_name)
    print(path)
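    # Two accumulators: `lists` collects group-membership rows for the first report,
    # `access` collects object-access rows for the second.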
    lists = []
    access = []
    users = list_users(account_id, aws_region)
    for user in users:
        groups = list_user_groups(user['UserName'], account_id, aws_region)
        if len(groups) == 0:
            lists.append([aws_region, None, user['UserName']])
        else:
            for group in groups:
                lists.append([aws_region, group['GroupName'], user['UserName']])
    print(len(lists))
    print(lists)
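    # Write the group-membership rows to the temp CSV and upload it to its S3 key.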
    with open(path, 'w', newline='') as outfile:
        writer = csv.writer(outfile)
        for line in lists:
            writer.writerow(line)
    bucket.upload_file(path, key)
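    # Second report: object access. Walk dashboards, datasets and data sources and
    # record which principals hold which permissions on each asset.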
    path = os.path.join(tmpdir, local_file_name2)
    print(path)
    dashboards = list_dashboards(account_id, aws_region)
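    # One row per (dashboard, principal), capturing the principal type and name parsed
    # from the principal ARN plus the granted actions joined with '|'.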
    for dashboard in dashboards:
        dashboardid = dashboard['DashboardId']
        response = describe_dashboard_permissions(account_id, dashboardid, aws_region)
        permissions = response['Permissions']
        for permission in permissions:
            actions = '|'.join(permission['Actions'])
            # Parse the principal ARN: the principal type is the last ':' token of the
            # first '/' segment; the second-to-last '/' segment is kept as additional_info
            # (the namespace for user/group ARNs) and the last as the principal name.
            arn_parts = permission['Principal'].split("/")
            ptype = arn_parts[0].split(":")[-1]
            additional_info = arn_parts[-2]
            principal = arn_parts[-1]
            access.append(
                [aws_region, 'dashboard', dashboard['Name'], dashboardid, ptype, principal, additional_info,
                 actions])
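    # Datasets are handled the same way; the filtered-out names below appear to be the
    # QuickSight sample assets.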
    datasets = list_datasets(account_id, aws_region)
    for dataset in datasets:
        if dataset['Name'] not in ['Business Review', 'People Overview', 'Sales Pipeline',
                                   'Web and Social Media Analytics']:
            datasetid = dataset['DataSetId']
            response = describe_data_set_permissions(account_id, datasetid, aws_region)
            permissions = response['Permissions']
            for permission in permissions:
                actions = '|'.join(permission['Actions'])
                arn_parts = permission['Principal'].split("/")
                ptype = arn_parts[0].split(":")[-1]
                additional_info = arn_parts[-2]
                principal = arn_parts[-1]
                access.append(
                    [aws_region, 'dataset', dataset['Name'], datasetid, ptype, principal, additional_info,
                     actions])
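    # Data sources follow the same pattern, with extra guards because the permissions
    # call can fail for some data source types.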
    datasources = list_datasources(account_id, aws_region)
    for datasource in datasources:
        print(datasource)
        if datasource['Name'] not in ['Business Review', 'People Overview', 'Sales Pipeline',
                                      'Web and Social Media Analytics']:
            datasourceid = datasource['DataSourceId']
            # Only data sources that carry DataSourceParameters are described here.
            if 'DataSourceParameters' in datasource:
                print(datasourceid)
                try:
                    response = describe_data_source_permissions(account_id, datasourceid, aws_region)
                    print(response)
                    permissions = response['Permissions']
                    print(permissions)
                    for permission in permissions:
                        actions = '|'.join(permission['Actions'])
                        arn_parts = permission['Principal'].split("/")
                        ptype = arn_parts[0].split(":")[-1]
                        additional_info = arn_parts[-2]
                        principal = arn_parts[-1]
                        access.append([aws_region, 'data_source', datasource['Name'], datasourceid, ptype,
                                       principal, additional_info, actions])
                except Exception as e:
                    # Permissions cannot be described for every data source; log and skip.
                    print(e)
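    # Write the combined object-access rows to the second temp CSV before uploading.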
    print(access)
    with open(path, 'w', newline='') as outfile:
        writer = csv.writer(outfile)
        for line in access:
            writer.writerow(line)
    # Upload the object-access report from the temp directory to its S3 key.
    bucket.upload_file(path, key2)