api/pages/forum/top-count.py
import json
import re
import time

def run(API, environ, indata, session):
    # We need to be logged in for this!
    if not session.user:
        raise API.exception(403, "You must be logged in to use this API endpoint!")
    now = time.time()
    # First, fetch the view if we have such a thing enabled
    viewList = []
    if indata.get('view'):
        viewList = session.getView(indata.get('view'))
    if indata.get('subfilter'):
        viewList = session.subFilter(indata.get('subfilter'), view = viewList)
    dateTo = indata.get('to', int(time.time()))
    dateFrom = indata.get('from', dateTo - (86400*30*6)) # Default to a 6 month span
    ####################################################################
    ####################################################################
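    # Build the base query: forum posts created within [dateFrom, dateTo]
    # that belong to the user's default organisation.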
    dOrg = session.user['defaultOrganisation'] or "apache"
    query = {
        'query': {
            'bool': {
                'must': [
                    {'range':
                        {
                            'created': {
                                'from': dateFrom,
                                'to': dateTo
                            }
                        }
                    },
                    {
                        'term': {
                            'organisation': dOrg
                        }
                    }
                ]
            }
        }
    }
    # Source-specific or view-specific??
    if indata.get('source'):
        query['query']['bool']['must'].append({'term': {'sourceID': indata.get('source')}})
    elif viewList:
        query['query']['bool']['must'].append({'terms': {'sourceID': viewList}})
    # Optionally narrow the result to posts by a specific author
    if indata.get('email'):
        query['query']['bool']['should'] = [
            {'term': {'creator': indata.get('email')}}
        ]
        # Without this, the 'should' clause would only influence scoring, not filter
        query['query']['bool']['minimum_should_match'] = 1
    # Aggregate post counts per forum (sourceID); the top 25 are picked out below
    query['aggs'] = {
        'by_repo': {
            'terms': {
                'field': 'sourceID',
                'size': 5000
            }
        }
    }
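    # Run the search with size=0: only the aggregation buckets are needed, not the hits themselves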
    res = session.DB.ES.search(
        index=session.DB.dbname,
        doc_type="forum_post",
        size = 0,
        body = query
    )
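    # Resolve each sourceID bucket to its source document and derive a readable forum name from its URL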
    toprepos = []
    for bucket in res['aggregations']['by_repo']['buckets']:
        ID = bucket['key']
        if session.DB.ES.exists(index=session.DB.dbname, doc_type="source", id = ID):
            it = session.DB.ES.get(index=session.DB.dbname, doc_type="source", id = ID)['_source']
            repo = re.sub(r".+/([^/]+)$", r"\1", it['sourceURL'])
            count = bucket['doc_count']
            toprepos.append([repo, count])
    # Sort by post count; keep the top 25 and fold the remainder into "Other forums"
    toprepos = sorted(toprepos, key = lambda x: x[1], reverse = True)
    top = toprepos[0:25]
    if len(toprepos) > 25:
        count = 0
        for repo in toprepos[25:]:
            count += repo[1]
        top.append(["Other forums", count])
    tophash = {}
    for v in top:
        tophash[v[0]] = v[1]

    JSON_OUT = {
        'counts': tophash,
        'okay': True,
        'responseTime': time.time() - now,
    }
    yield json.dumps(JSON_OUT)