def lambda_handler()

in Admin_Console/Dataset_info.py

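This handler builds two of the Admin Console's monitoring extracts: a dashboard-to-dataset lineage file (datsets_info.csv) that records, for every dashboard, its source analysis, each backing dataset, and the data source, catalog, schema, and SQL behind every physical table; and a column-level data dictionary (data_dictionary.csv) covering every dataset in the account. Both files are written to /tmp and uploaded to the admin-console{account_id} S3 bucket under monitoring/quicksight/.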

def lambda_handler(event, context):
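    # NOTE: boto3, csv, os, tempfile, and the globals aws_region and
    # lambda_aws_region are assumed to be imported/defined at module level
    # in Dataset_info.py, along with the QuickSight helper wrappers used below.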
    sts_client = boto3.client("sts", region_name=aws_region)
    account_id = sts_client.get_caller_identity()["Account"]

    # target S3 bucket for the Admin Console monitoring extracts
    s3 = boto3.resource('s3')
    bucketname = 'admin-console' + account_id
    bucket = s3.Bucket(bucketname)

    key = 'monitoring/quicksight/datsets_info/datsets_info.csv'
    key2 = 'monitoring/quicksight/datsets_ingestion/datsets_ingestion.csv'
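    # the 'datsets' spelling is kept as-is: these keys presumably must match
    # what downstream consumers (e.g. Athena table locations) already point at.
    # key2/path2 are prepared here but not written by the code shown below.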
    key3 = 'monitoring/quicksight/data_dictionary/data_dictionary.csv'
    tmpdir = tempfile.mkdtemp()
    local_file_name = 'datsets_info.csv'
    local_file_name2 = 'datsets_ingestion.csv'
    local_file_name3 = 'data_dictionary.csv'
    path = os.path.join(tmpdir, local_file_name)
    path2 = os.path.join(tmpdir, local_file_name2)
    path3 = os.path.join(tmpdir, local_file_name3)

    access = []           # one row per dashboard -> dataset -> physical table
    data_dictionary = []  # one row per dataset column

    dashboards = list_dashboards(account_id, lambda_aws_region)

    for dashboard in dashboards:
        dashboardid = dashboard['DashboardId']

        response = describe_dashboard(account_id, dashboardid, lambda_aws_region)
        Dashboard = response['Dashboard']
        Name = Dashboard['Name']
        # the SourceEntityArn ends in the id of the analysis the dashboard was published from
        Sourceid = Dashboard['Version']['SourceEntityArn'].split("/")[-1]

        SourceName = 'N/A'  # default in case the source analysis has been deleted
        try:
            Source = describe_analysis(account_id, Sourceid, lambda_aws_region)
            SourceName = Source['Analysis']['Name']
        except Exception as e:
            # a deleted source analysis raises "... is not found";
            # skip it and re-raise anything else
            if 'is not found' in str(e):
                pass
            else:
                raise e

        DataSetArns = Dashboard['Version']['DataSetArns']
        for ds in DataSetArns:
            dsid = ds.split("/")[-1]
            try:
                dataset = describe_data_set(account_id, dsid, lambda_aws_region)
                dsname = dataset['DataSet']['Name']
                LastUpdatedTime = dataset['DataSet']['LastUpdatedTime']
                PhysicalTableMap = dataset['DataSet']['PhysicalTableMap']
                for table in PhysicalTableMap.values():
                    if 'RelationalTable' in table:
                        DataSourceid = table['RelationalTable']['DataSourceArn'].split("/")[-1]
                        datasource = describe_data_source(account_id, DataSourceid, lambda_aws_region)
                        datasourcename = datasource['DataSource']['Name']
                        Catalog = table['RelationalTable']['Catalog']
                        Schema = table['RelationalTable']['Schema']
                        sqlName = table['RelationalTable']['Name']
                        access.append(
                            [lambda_aws_region, Name, dashboardid, SourceName, Sourceid, dsname, dsid, LastUpdatedTime,
                             datasourcename, DataSourceid, Catalog, Schema, sqlName])

                    if 'CustomSql' in table:
                        DataSourceid = table['CustomSql']['DataSourceArn'].split("/")[-1]
                        datasource = describe_data_source(account_id, DataSourceid, lambda_aws_region)
                        datasourcename = datasource['DataSource']['Name']
                        # flatten the query so it stays on one line of the pipe-delimited CSV
                        SqlQuery = table['CustomSql']['SqlQuery'].replace("\n", " ")
                        sqlName = table['CustomSql']['Name']
                        access.append(
                            [lambda_aws_region, Name, dashboardid, SourceName, Sourceid, dsname, dsid, LastUpdatedTime,
                             datasourcename, DataSourceid, 'N/A', sqlName, SqlQuery])

            except Exception as e:
                # flat-file (uploaded) datasets cannot be described this way;
                # skip them and re-raise anything else
                if 'flat file' in str(e):
                    pass
                else:
                    raise e

    print(access)
    # write the lineage rows as a pipe-delimited CSV; the with-block
    # closes the file, so no explicit close() is needed
    with open(path, 'w', newline='') as outfile:
        writer = csv.writer(outfile, delimiter='|')
        for line in access:
            writer.writerow(line)

    # upload the file from /tmp to its s3 key
    bucket.upload_file(path, key)

    datasets = list_datasets(account_id, lambda_aws_region)
    for item in datasets:
        try:
            dsid = item['DataSetId']
            datasetname = item['Name']
            dataset_details = describe_data_set(account_id, dsid, lambda_aws_region)
            OutputColumns = dataset_details['DataSet']['OutputColumns']
            for column in OutputColumns:
                columnname = column['Name']
                columntype = column['Type']
                columndesc = column.get('Description')  # None when no description is set
                data_dictionary.append(
                    [datasetname, dsid, columnname, columntype, columndesc]
                )
        except Exception as e:
            # some dataset types cannot be described; skip those and re-raise the rest
            if 'data set type is not supported' in str(e):
                pass
            else:
                raise e

    print(data_dictionary)
    with open(path3, 'w', newline='') as outfile:
        writer = csv.writer(outfile, delimiter=',')
        for line in data_dictionary:
            writer.writerow(line)

    # upload the file from /tmp to its s3 key
    bucket.upload_file(path3, key3)
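
The QuickSight helpers called above (list_dashboards, describe_dashboard, describe_analysis, describe_data_set, describe_data_source, list_datasets) are defined elsewhere in the Admin Console code and are not shown in this excerpt. Below is a minimal sketch of what such wrappers could look like over the boto3 QuickSight client, assuming only that the list_* helpers paginate and the describe_* helpers return the raw API response (which is how the handler indexes them); the _qs_client helper name is invented for this sketch.


import boto3


def _qs_client(aws_region):
    # hypothetical helper: one QuickSight client per region
    return boto3.client('quicksight', region_name=aws_region)


def list_dashboards(account_id, aws_region):
    # paginate ListDashboards and return the combined DashboardSummaryList
    dashboards = []
    token = None
    while True:
        args = {'AwsAccountId': account_id, 'MaxResults': 100}
        if token:
            args['NextToken'] = token
        response = _qs_client(aws_region).list_dashboards(**args)
        dashboards += response['DashboardSummaryList']
        token = response.get('NextToken')
        if token is None:
            return dashboards


def list_datasets(account_id, aws_region):
    # paginate ListDataSets and return the combined DataSetSummaries
    datasets = []
    token = None
    while True:
        args = {'AwsAccountId': account_id, 'MaxResults': 100}
        if token:
            args['NextToken'] = token
        response = _qs_client(aws_region).list_data_sets(**args)
        datasets += response['DataSetSummaries']
        token = response.get('NextToken')
        if token is None:
            return datasets


def describe_dashboard(account_id, dashboardid, aws_region):
    return _qs_client(aws_region).describe_dashboard(
        AwsAccountId=account_id, DashboardId=dashboardid)


def describe_analysis(account_id, analysisid, aws_region):
    return _qs_client(aws_region).describe_analysis(
        AwsAccountId=account_id, AnalysisId=analysisid)


def describe_data_set(account_id, datasetid, aws_region):
    return _qs_client(aws_region).describe_data_set(
        AwsAccountId=account_id, DataSetId=datasetid)


def describe_data_source(account_id, datasourceid, aws_region):
    return _qs_client(aws_region).describe_data_source(
        AwsAccountId=account_id, DataSourceId=datasourceid)


This matches how the handler consumes the results: it indexes response['Dashboard'], ['Analysis'], ['DataSet'], and ['DataSource'] directly, and iterates the dataset summaries for their 'DataSetId' and 'Name' keys.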