in backend/plugins/sonarqube/tasks/issues_collector.go [50:268]
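// CollectIssues collects issues from the SonarQube `issues/search` API.
// The API refuses to page past 10,000 results, so the collector starts with
// one request per severity/status/type combination and, whenever a slice
// would still exceed that window, splits it further by creation-time range
// and finally by directory/file.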
func CollectIssues(taskCtx plugin.SubTaskContext) (err errors.Error) {
logger := taskCtx.GetLogger()
logger.Info("collect issues")
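	// seed the queue with every severity/status/type combination so each
	// initial request covers only a slice of the project's issues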
iterator := helper.NewQueueIterator()
severities := []string{"BLOCKER", "CRITICAL", "MAJOR", "MINOR", "INFO"}
statuses := []string{"OPEN", "CONFIRMED", "REOPENED", "RESOLVED", "CLOSED"}
types := []string{"BUG", "VULNERABILITY", "CODE_SMELL"}
for _, severity := range severities {
for _, status := range statuses {
for _, typ := range types {
iterator.Push(
&SonarqubeIssueIteratorNode{
Severity: severity,
Status: status,
Type: typ,
CreatedAfter: nil,
CreatedBefore: nil,
FilePath: "",
},
)
}
}
}
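	// the collector drains the queue above; GetTotalPages below may push
	// narrower nodes back onto it whenever a slice is still too large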
rawDataSubTaskArgs, data := CreateRawDataSubTaskArgs(taskCtx, RAW_ISSUES_TABLE)
collector, err := helper.NewApiCollector(helper.ApiCollectorArgs{
RawDataSubTaskArgs: *rawDataSubTaskArgs,
ApiClient: data.ApiClient,
PageSize: 100,
UrlTemplate: "issues/search",
Input: iterator,
Query: func(reqData *helper.RequestData) (url.Values, errors.Error) {
query := url.Values{}
input, ok := reqData.Input.(*SonarqubeIssueIteratorNode)
if !ok {
				return nil, errors.Default.New(fmt.Sprintf("failed to cast Input to *SonarqubeIssueIteratorNode: %+v", reqData.Input))
}
			if input.FilePath != "" {
				query.Set("componentKeys", fmt.Sprintf("%v:%v", data.Options.ProjectKey, input.FilePath))
			} else {
				query.Set("componentKeys", fmt.Sprintf("%v", data.Options.ProjectKey))
				// request the `directories` facet so an oversized result can be split by directory later
				query.Set("facets", "directories")
			}
query.Set("severities", input.Severity)
query.Set("statuses", input.Status)
query.Set("types", input.Type)
if input.CreatedAfter != nil {
query.Set("createdAfter", GetFormatTime(input.CreatedAfter))
}
if input.CreatedBefore != nil {
query.Set("createdBefore", GetFormatTime(input.CreatedBefore))
}
query.Set("p", fmt.Sprintf("%v", reqData.Pager.Page))
query.Set("ps", fmt.Sprintf("%v", reqData.Pager.Size))
return query, nil
},
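		// GetTotalPages derives the page count from the first response; when
		// the result set would exceed the 10,000-issue window it requeues
		// narrower requests and returns 0 so no further pages are scheduled
		// for the oversized slice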
GetTotalPages: func(res *http.Response, args *helper.ApiCollectorArgs) (int, errors.Error) {
body := &SonarqubePageInfo{}
err := helper.UnmarshalResponse(res, body)
if err != nil {
return 0, err
}
pages := body.Paging.Total / args.PageSize
if body.Paging.Total%args.PageSize > 0 {
pages++
}
query := res.Request.URL.Query()
			// SonarQube's search API cannot page beyond 10,000 results, so an oversized result set must be split into narrower requests
if pages > MAXPAGES {
severity := query.Get("severities")
status := query.Get("statuses")
typ := query.Get("types")
var createdAfterUnix int64
var createdBeforeUnix int64
createdAfter, err := getTimeFromFormatTime(query.Get("createdAfter"))
if err != nil {
return 0, err
}
createdBefore, err := getTimeFromFormatTime(query.Get("createdBefore"))
if err != nil {
return 0, err
}
if createdAfter == nil {
createdAfterUnix = 0
} else {
createdAfterUnix = createdAfter.Unix()
}
if createdBefore == nil {
createdBeforeUnix = time.Now().Unix()
} else {
createdBeforeUnix = createdBefore.Unix()
}
				// the time window is too small to split any further
				if createdBeforeUnix-createdAfterUnix <= MININTERVAL {
					// fall back to splitting by directory, then by file
for _, facet := range body.Facets {
for _, value := range facet.Values {
if value.Count <= MAXISSUECOUNT {
iterator.Push(&SonarqubeIssueIteratorNode{
Severity: severity,
Status: status,
Type: typ,
CreatedAfter: createdAfter,
CreatedBefore: createdBefore,
FilePath: value.Val,
})
								logger.Info("split by directory, issue count:[%d], file path:[%s]", value.Count, value.Val)
} else {
								// this directory still holds more than MAXISSUECOUNT issues, so split it further by file
resWithPath, err := data.ApiClient.Get("issues/search", url.Values{
"componentKeys": {fmt.Sprintf("%v", data.Options.ProjectKey)},
"directories": {value.Val},
"facets": {"files"},
"ps": {"1"},
}, nil)
if err != nil {
return 0, err
}
bodyWithPath := &SonarqubePageInfo{}
err = helper.UnmarshalResponse(resWithPath, bodyWithPath)
if err != nil {
return 0, err
}
if len(bodyWithPath.Facets) != 1 {
									return 0, errors.Default.New(fmt.Sprintf("expected exactly one facet, got %d", len(bodyWithPath.Facets)))
}
for _, value2 := range bodyWithPath.Facets[0].Values {
if value2.Count > MAXISSUECOUNT {
										logger.Warn(fmt.Errorf("the issue count [%d] of a single file exceeds the 10,000-issue window and cannot be split further", value2.Count), "")
}
iterator.Push(&SonarqubeIssueIteratorNode{
Severity: severity,
Status: status,
Type: typ,
CreatedAfter: createdAfter,
CreatedBefore: createdBefore,
FilePath: value2.Val,
})
									logger.Info("split by file, issue count:[%d], file path:[%s]", value2.Count, value2.Val)
}
}
}
}
return 0, nil
}
				// split the time window at its midpoint and requeue both halves
				midTime := time.Unix((createdAfterUnix+createdBeforeUnix)/2+1, 0)
// left part
iterator.Push(&SonarqubeIssueIteratorNode{
Severity: severity,
Status: status,
Type: typ,
CreatedAfter: createdAfter,
					CreatedBefore: &midTime,
})
// right part
iterator.Push(&SonarqubeIssueIteratorNode{
Severity: severity,
Status: status,
Type: typ,
					CreatedAfter: &midTime,
CreatedBefore: createdBefore,
})
				logger.Info("split [%s][%s] at midpoint [%s]: pages:[%d], total:[%d]",
					query.Get("createdAfter"), query.Get("createdBefore"), GetFormatTime(&midTime), pages, body.Paging.Total)
return 0, nil
} else {
logger.Info("[%s][%s] has pages:[%d] and total:[%d]",
query.Get("createdAfter"), query.Get("createdBefore"), pages, body.Paging.Total)
return pages, nil
}
},
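		// ResponseParser extracts the raw issues and aborts the collection if
		// any issue was updated after the task started, since a fresh analysis
		// mid-collection would make the paged results inconsistent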
ResponseParser: func(res *http.Response) ([]json.RawMessage, errors.Error) {
var resData struct {
Data []json.RawMessage `json:"issues"`
}
			err := helper.UnmarshalResponse(res, &resData)
if err != nil {
return nil, err
}
			// check whether a new SonarQube analysis landed while collecting
			for _, v := range resData.Data {
				// declared inside the loop so a missing updateDate is not
				// carried over from the previous issue
				var issue struct {
					UpdateDate *common.Iso8601Time `json:"updateDate"`
				}
				err = errors.Convert(json.Unmarshal(v, &issue))
if err != nil {
return nil, err
}
				// guard against issues that lack an updateDate
				if issue.UpdateDate != nil && issue.UpdateDate.ToTime().After(data.TaskStartTime) {
					return nil, errors.Default.New(fmt.Sprintf("Your data is affected by the latest analysis\nPlease recollect this project: %s", data.Options.ProjectKey))
}
}
return resData.Data, nil
},
})
if err != nil {
return err
}
return collector.Execute()
}