in tools/unsupervised_dataset/sql_crawler/crawler_log.py [0:0]
def close(self):
    """Flushes remaining queries and closes the crawler log.

    Uploads the log file to Google Cloud Storage when configured to do so,
    and prints a summary message if any handled errors were logged during
    the crawling process.
    """
    logging.info("Finished crawling.")
    # Flush any queries still buffered in memory, then release the local
    # CSV handle. In streaming mode there is no local file to close.
    self.flush_data(self.batch_data)
    if not self.stream:
        self.csv_file.close()
    # Save file to GCS, if applicable. The object name is derived from the
    # crawl start time so repeated runs do not collide. (file_name is only
    # needed on this path, so it is built inside the branch.)
    if self.save_to_gcs:
        file_name = "queries_{0}".format(self.start_time)
        status, message = cloud_integration.upload_gcs_file(
            self.gcs_project, self.gcs_bucket, file_name, self.query_name)
        if status:
            logging.info(message)
        else:
            # Upload failure is recorded through the log's own error
            # channel rather than raised, keeping close() best-effort.
            self.log_error(message)
    if self.error_log_count > 0:
        print("Logged {0} errors. See log for details.".format(self.error_log_count))