in glam/api/management/commands/import_glean_counts.py [0:0]
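For context, the handle() method below iterates a module-level MAPPING constant that is defined elsewhere in this file and not shown in this excerpt. A hypothetical shape, inferred only from the keys the loop accesses, might be:

MAPPING = {
    # The product key and all values here are illustrative placeholders,
    # not the project's actual configuration.
    "fenix": {
        "model": "api.FenixCountsView",   # dotted path for apps.get_model()
        "schema_name": "org_mozilla",     # used to build the extract filename
        "apps": ["nightly", "beta", "release"],
    },
}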
def handle(self, bucket, *args, **options):
    """Import Glean counts extracts from a GCS bucket into the per-product aggregation tables."""
    gcs_client = storage.Client()
    bucket = gcs_client.get_bucket(bucket)

    for product, opts in MAPPING.items():
        model = apps.get_model(opts["model"])
        schema = opts["schema_name"]

        for app_id in opts["apps"]:
            filename = f"glam-extract-{schema}_glam_{app_id}-counts.csv"
            blob = bucket.get_blob(filename)
            if not blob:
                continue

            # Create temp table for data.
            tmp_table = f"tmp_import_{product}_counts"
            log(f"Creating temp table for import: {tmp_table}.")
            with connection.cursor() as cursor:
                cursor.execute(f"DROP TABLE IF EXISTS {tmp_table}")
                cursor.execute(
                    f"CREATE TABLE {tmp_table} (LIKE glam_{product}_counts)"
                )
                cursor.execute(
                    f"ALTER TABLE {tmp_table} DROP COLUMN id, DROP COLUMN app_id"
                )

            # Download CSV file to local filesystem.
            fp = tempfile.NamedTemporaryFile()
            log(f"Copying GCS file {blob.name} to local file {fp.name}.")
            blob.download_to_filename(fp.name)

            # Load CSV into temp table & insert data from temp table into
            # aggregation tables, using upserts.
            self.import_file(tmp_table, fp, model, app_id)

            # Drop temp table and remove file.
            log("Dropping temp table.")
            with connection.cursor() as cursor:
                cursor.execute(f"DROP TABLE {tmp_table}")

            log(f"Deleting local file: {fp.name}.")
            fp.close()
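The import_file() helper called above is not part of this excerpt. A minimal sketch of what such a helper could look like, assuming psycopg2 under Django and a purely illustrative column list and conflict target (the real glam schema is not shown here):

def import_file(self, tmp_table, fp, model, app_id):
    # Hypothetical sketch: the column names and ON CONFLICT target are
    # assumptions for illustration, not the project's actual schema.
    columns = ["channel", "version", "metric", "total_users"]
    column_list = ", ".join(columns)
    target_table = model._meta.db_table

    with connection.cursor() as cursor:
        # Bulk-load the downloaded CSV into the temp table.
        with open(fp.name) as csv_file:
            cursor.copy_expert(
                f"COPY {tmp_table} ({column_list}) FROM STDIN WITH CSV HEADER",
                csv_file,
            )
        # Upsert from the temp table into the aggregation table, stamping
        # every row with the app_id being imported.
        cursor.execute(
            f"""
            INSERT INTO {target_table} (app_id, {column_list})
            SELECT %s, {column_list} FROM {tmp_table}
            ON CONFLICT (app_id, channel, version, metric)
            DO UPDATE SET total_users = EXCLUDED.total_users
            """,
            [app_id],
        )

Assuming the bucket name is registered as a positional argument in the command's add_arguments(), the import would then be run as, for example: ./manage.py import_glean_counts my-gcs-bucket.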