in glam/api/views.py [0:0]
def get_glean_aggregations_from_pg(request, **kwargs):
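    """Return Glean (Fenix/FOG) probe aggregations from the Postgres views.

    ``kwargs`` must contain the required query parameters listed below; the
    result is a list of dicts, one per matching aggregate row.
    """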
REQUIRED_QUERY_PARAMETERS = [
"aggregationLevel",
"app_id",
"ping_type",
"probe",
"product",
]
    if any(k not in kwargs for k in REQUIRED_QUERY_PARAMETERS):
        # Figure out which query parameters are missing.
        missing = set(REQUIRED_QUERY_PARAMETERS) - set(kwargs.keys())
        raise ValidationError(
            "Missing required query parameters: {}".format(", ".join(sorted(missing)))
        )
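    # Map each supported product to the Postgres view model holding its aggregates.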
MODEL_MAP = {
"fenix": FenixAggregationView,
"fog": FOGAggregationView,
}
    product = kwargs.get("product")
    if product not in MODEL_MAP:
        raise ValidationError("Unsupported product: {}".format(product))
    model = MODEL_MAP[product]
probe = kwargs["probe"]
num_versions = kwargs.get("versions", 3)
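    # Collect the most recent `num_versions` versions that have data for this probe.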
try:
versions = list(
model.objects.filter(Q(metric=probe))
.order_by("-version")
.values_list("version", flat=True)
.distinct("version")[:num_versions]
)
except (ValueError, KeyError):
raise ValidationError("Query version cannot be determined")
except TypeError:
# This happens when `version` is NULL,
# suggesting that we have no data for this model.
raise NotFound("No data found for the provided parameters")
app_id = kwargs["app_id"]
ping_type = kwargs["ping_type"]
os = kwargs.get("os", "*")
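    # Filters shared by both aggregation levels; the build_id filter is added below.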
dimensions = [
Q(app_id=app_id),
Q(metric=probe),
Q(ping_type=ping_type),
Q(version__in=versions),
Q(os=os),
]
aggregation_level = kwargs["aggregationLevel"]
# Whether to pull aggregations by version or build_id.
if aggregation_level == "version":
if product == "fenix":
dimensions.append(Q(build_id="*"))
# counts = _get_fenix_counts(app_id, versions, ping_type,
# os, by_build=False)
if product == "fog":
dimensions.append(~Q(build_id="*"))
# counts = _get_fog_counts(app_id, versions, ping_type, os, by_build=False)
if aggregation_level == "build_id":
if product == "fenix":
dimensions.append(~Q(build_id="*"))
# counts = _get_fenix_counts(app_id, versions, ping_type, os, by_build=True)
if product == "fog":
dimensions.append(~Q(build_id="*"))
# counts = _get_fog_counts(app_id, versions, ping_type, os, by_build=True)
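    # Fetch every aggregate row matching the requested dimensions.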
result = model.objects.filter(*dimensions)
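    # Shape each aggregate row into a plain dict for the JSON response.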
response = []
for row in result:
data = {
"version": row.version,
"ping_type": row.ping_type,
"os": row.os,
"build_id": row.build_id,
"build_date": row.build_date,
"metric": row.metric,
"metric_type": row.metric_type,
"metric_key": row.metric_key,
"client_agg_type": row.client_agg_type,
"total_users": row.total_users,
"sample_count": row.total_sample,
"histogram": row.histogram and orjson.loads(row.histogram) or "",
"percentiles": row.percentiles and orjson.loads(row.percentiles) or "",
}
# Get the total distinct client IDs for this set of dimensions.
# data["total_addressable_market"] = counts.get(f"{row.version}-{row.build_id}")
response.append(data)
_log_probe_query(request)
return response
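
For reference, here is a minimal sketch of how a wrapper endpoint might call this helper, assuming a DRF-style view. The view name, the "query" body key, and the response envelope are illustrative assumptions and are not taken from glam/api/views.py.

# Hypothetical caller sketch: names and body shape are assumptions for illustration.
from rest_framework.decorators import api_view
from rest_framework.response import Response

@api_view(["POST"])
def aggregations(request):
    # Assume the client POSTs its parameters under a "query" key in the JSON body.
    query = request.data.get("query", {})
    data = get_glean_aggregations_from_pg(request, **query)
    return Response({"response": data})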