def read_view_as_spark()

in webinars/snowflake_2021-09/finspace_spark.py [0:0]


    def read_view_as_spark(
        self,
        dataset_id: str,
        view_id: str
    ):
        # TODO: switch to DescribeMatz when available in HFS
        # Until then, list the dataset's views and filter for the requested one
        views = self.list_views(dataset_id=dataset_id, max_results=50)
        filtered = [v for v in views if v['id'] == view_id]

        if not filtered:
            raise ValueError(f'No view {view_id} found in dataset {dataset_id}')
        if len(filtered) > 1:
            raise RuntimeError(f'Multiple views returned for id {view_id}')
        view = filtered[0]
        
        # Ensure the view has finished materializing before reading it
        status = view['status']
        if status != 'SUCCESS':
            print(f'View run status is not ready: {status}. Returning None.')
            return None

        # A successful view is materialized as a Glue table; pull its coordinates
        glue_db_name = view['destinationTypeProperties']['databaseName']
        glue_table_name = view['destinationTypeProperties']['tableName']

        # Query the Glue table directly through Spark's catalog integration
        return self.spark.table(f"`{glue_db_name}`.`{glue_table_name}`")
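
A minimal usage sketch follows. The wrapper class name (SparkFinSpace), its constructor signature, and the dataset/view IDs are assumptions for illustration; the excerpt above only shows the method itself.

    from pyspark.sql import SparkSession

    # Hypothetical setup: the class name and constructor are assumptions,
    # since the excerpt above shows only the method body
    spark = SparkSession.builder.getOrCreate()
    finspace = SparkFinSpace(spark=spark)

    df = finspace.read_view_as_spark(
        dataset_id='my-dataset-id',  # placeholder ID
        view_id='my-view-id'         # placeholder ID
    )

    # The method returns None when the view has not finished materializing
    if df is not None:
        df.show(5)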