func parseProwConfig()

in prow/config/config.go [2099:2388]


func parseProwConfig(c *Config) error {
	if err := ValidateController(&c.Plank.Controller); err != nil {
		return fmt.Errorf("validating plank config: %w", err)
	}

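	// Default Plank's pod pending, running, and unscheduled timeouts.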
	if c.Plank.PodPendingTimeout == nil {
		c.Plank.PodPendingTimeout = &metav1.Duration{Duration: 10 * time.Minute}
	}

	if c.Plank.PodRunningTimeout == nil {
		c.Plank.PodRunningTimeout = &metav1.Duration{Duration: 48 * time.Hour}
	}

	if c.Plank.PodUnscheduledTimeout == nil {
		c.Plank.PodUnscheduledTimeout = &metav1.Duration{Duration: 5 * time.Minute}
	}

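	// Default Gerrit's tick interval and API rate limit.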
	if c.Gerrit.TickInterval == nil {
		c.Gerrit.TickInterval = &metav1.Duration{Duration: time.Minute}
	}

	if c.Gerrit.RateLimit == 0 {
		c.Gerrit.RateLimit = 5
	}

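	// By default, report presubmit and postsubmit job results to GitHub.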
	if len(c.GitHubReporter.JobTypesToReport) == 0 {
		c.GitHubReporter.JobTypesToReport = append(c.GitHubReporter.JobTypesToReport, prowapi.PresubmitJob, prowapi.PostsubmitJob)
	}

	// Validate that the entries are valid job types.
	// Currently only presubmit and postsubmit results can be reported to GitHub.
	for _, t := range c.GitHubReporter.JobTypesToReport {
		if t != prowapi.PresubmitJob && t != prowapi.PostsubmitJob {
			return fmt.Errorf("invalid job_types_to_report: %v", t)
		}
	}

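	// Validate each Jenkins operator's controller config and parse its label selector.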
	for i := range c.JenkinsOperators {
		if err := ValidateController(&c.JenkinsOperators[i].Controller); err != nil {
			return fmt.Errorf("validating jenkins_operators config: %w", err)
		}
		sel, err := labels.Parse(c.JenkinsOperators[i].LabelSelectorString)
		if err != nil {
			return fmt.Errorf("invalid jenkins_operators.label_selector option: %w", err)
		}
		c.JenkinsOperators[i].LabelSelector = sel
		// TODO: Add more validation against overlapping selectors.
		if len(c.JenkinsOperators) > 1 && c.JenkinsOperators[i].LabelSelectorString == "" {
			return errors.New("selector overlap: cannot use an empty label_selector with multiple selectors")
		}
		if len(c.JenkinsOperators) == 1 && c.JenkinsOperators[0].LabelSelectorString != "" {
			return errors.New("label_selector is invalid when used for a single jenkins-operator")
		}
	}

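	// Parse the URL templates and label selectors for Deck's external agent logs.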
	for i, agentToTmpl := range c.Deck.ExternalAgentLogs {
		urlTemplate, err := template.New(agentToTmpl.Agent).Parse(agentToTmpl.URLTemplateString)
		if err != nil {
			return fmt.Errorf("parsing template for agent %q: %w", agentToTmpl.Agent, err)
		}
		c.Deck.ExternalAgentLogs[i].URLTemplate = urlTemplate
		// We need to validate the selectors used by Deck, since they are not
		// sent to the API server.
		s, err := labels.Parse(c.Deck.ExternalAgentLogs[i].SelectorString)
		if err != nil {
			return fmt.Errorf("error parsing selector %q: %w", c.Deck.ExternalAgentLogs[i].SelectorString, err)
		}
		c.Deck.ExternalAgentLogs[i].Selector = s
	}

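	// Default Deck's Tide update period and Spyglass's artifact size limit.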
	if c.Deck.TideUpdatePeriod == nil {
		c.Deck.TideUpdatePeriod = &metav1.Duration{Duration: time.Second * 10}
	}

	if c.Deck.Spyglass.SizeLimit == 0 {
		c.Deck.Spyglass.SizeLimit = 100e6
	} else if c.Deck.Spyglass.SizeLimit <= 0 {
		return fmt.Errorf("invalid value for deck.spyglass.size_limit, must be >=0")
	}

	// Migrate the old `viewers` format to the new `lenses` format.
	var oldLenses []LensFileConfig
	for regex, viewers := range c.Deck.Spyglass.Viewers {
		for _, viewer := range viewers {
			lfc := LensFileConfig{
				RequiredFiles: []string{regex},
				Lens: LensConfig{
					Name: viewer,
				},
			}
			oldLenses = append(oldLenses, lfc)
		}
	}
	// Ensure the ordering is stable, because these are referenced by index elsewhere.
	sort.Slice(oldLenses, func(i, j int) bool { return oldLenses[i].Lens.Name < oldLenses[j].Lens.Name })
	c.Deck.Spyglass.Lenses = append(c.Deck.Spyglass.Lenses, oldLenses...)

	// Parse and cache all our regexes upfront
	c.Deck.Spyglass.RegexCache = make(map[string]*regexp.Regexp)
	for _, lens := range c.Deck.Spyglass.Lenses {
		toCompile := append(lens.OptionalFiles, lens.RequiredFiles...)
		for _, v := range toCompile {
			if _, ok := c.Deck.Spyglass.RegexCache[v]; ok {
				continue
			}
			r, err := regexp.Compile(v)
			if err != nil {
				return fmt.Errorf("cannot compile regexp %q, err: %w", v, err)
			}
			c.Deck.Spyglass.RegexCache[v] = r
		}
	}

	// Map old viewer names to the new ones for backwards compatibility.
	// TODO(Katharine, #10274): remove this, eventually.
	oldViewers := map[string]string{
		"build-log-viewer": "buildlog",
		"metadata-viewer":  "metadata",
		"junit-viewer":     "junit",
	}

	for re, viewers := range c.Deck.Spyglass.Viewers {
		for i, v := range viewers {
			if rename, ok := oldViewers[v]; ok {
				c.Deck.Spyglass.Viewers[re][i] = rename
			}
		}
	}

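	// Fold gcs_browser_prefix into gcs_browser_prefixes under the '*' key; setting both is an error.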
	if c.Deck.Spyglass.GCSBrowserPrefixes == nil {
		c.Deck.Spyglass.GCSBrowserPrefixes = make(map[string]string)
	}

	_, exists := c.Deck.Spyglass.GCSBrowserPrefixes["*"]
	if exists && c.Deck.Spyglass.GCSBrowserPrefix != "" {
		return fmt.Errorf("both gcs_browser_prefixes and gcs_browser_prefix['*'] are specified.")
	}

	if !exists {
		c.Deck.Spyglass.GCSBrowserPrefixes["*"] = c.Deck.Spyglass.GCSBrowserPrefix
	}

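	// Default the push gateway interval and sinker's resync, max-age, and TTL settings.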
	if c.PushGateway.Interval == nil {
		c.PushGateway.Interval = &metav1.Duration{Duration: time.Minute}
	}

	if c.Sinker.ResyncPeriod == nil {
		c.Sinker.ResyncPeriod = &metav1.Duration{Duration: time.Hour}
	}

	if c.Sinker.MaxProwJobAge == nil {
		c.Sinker.MaxProwJobAge = &metav1.Duration{Duration: 7 * 24 * time.Hour}
	}

	if c.Sinker.MaxPodAge == nil {
		c.Sinker.MaxPodAge = &metav1.Duration{Duration: 24 * time.Hour}
	}

	if c.Sinker.TerminatedPodTTL == nil {
		c.Sinker.TerminatedPodTTL = &metav1.Duration{Duration: c.Sinker.MaxPodAge.Duration}
	}

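	// Default Tide's sync and status-update periods and its goroutine cap.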
	if c.Tide.SyncPeriod == nil {
		c.Tide.SyncPeriod = &metav1.Duration{Duration: time.Minute}
	}

	if c.Tide.StatusUpdatePeriod == nil {
		c.Tide.StatusUpdatePeriod = c.Tide.SyncPeriod
	}

	if c.Tide.MaxGoroutines == 0 {
		c.Tide.MaxGoroutines = 20
	}
	if c.Tide.MaxGoroutines <= 0 {
		return fmt.Errorf("tide has invalid max_goroutines (%d), it needs to be a positive number", c.Tide.MaxGoroutines)
	}

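	// target_url and target_urls are mutually exclusive; fold the scalar into target_urls under '*'.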
	if len(c.Tide.TargetURLs) > 0 && c.Tide.TargetURL != "" {
		return fmt.Errorf("tide.target_url and tide.target_urls are mutually exclusive")
	}

	if c.Tide.TargetURLs == nil {
		c.Tide.TargetURLs = map[string]string{}
	}
	if c.Tide.TargetURL != "" {
		c.Tide.TargetURLs["*"] = c.Tide.TargetURL
	}

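	// Fold the deprecated pr_status_base_url into pr_status_base_urls; when the map is used it must contain a '*' default.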
	if c.Tide.PRStatusBaseURLs == nil {
		c.Tide.PRStatusBaseURLs = map[string]string{}
	}

	if len(c.Tide.PRStatusBaseURL) > 0 {
		if len(c.Tide.PRStatusBaseURLs) > 0 {
			return fmt.Errorf("both pr_status_base_url and pr_status_base_urls are defined")
		} else {
			logrus.Warning("The `pr_status_base_url` setting is deprecated and it has been replaced by `pr_status_base_urls`. It will be removed in June 2020")
			c.Tide.PRStatusBaseURLs["*"] = c.Tide.PRStatusBaseURL
		}
	}

	if len(c.Tide.PRStatusBaseURLs) > 0 {
		if _, ok := c.Tide.PRStatusBaseURLs["*"]; !ok {
			return fmt.Errorf("pr_status_base_urls is defined but the default value ('*') is missing")
		}
	}

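	// Only merge, rebase, and squash are valid Tide merge methods.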
	for name, method := range c.Tide.MergeType {
		if method != github.MergeMerge &&
			method != github.MergeRebase &&
			method != github.MergeSquash {
			return fmt.Errorf("merge type %q for %s is not a valid type", method, name)
		}
	}

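	// Parse any configured commit title and body templates.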
	for name, templates := range c.Tide.MergeTemplate {
		if templates.TitleTemplate != "" {
			titleTemplate, err := template.New("CommitTitle").Parse(templates.TitleTemplate)

			if err != nil {
				return fmt.Errorf("parsing template for commit title: %w", err)
			}

			templates.Title = titleTemplate
		}

		if templates.BodyTemplate != "" {
			bodyTemplate, err := template.New("CommitBody").Parse(templates.BodyTemplate)

			if err != nil {
				return fmt.Errorf("parsing template for commit body: %w", err)
			}

			templates.Body = bodyTemplate
		}

		c.Tide.MergeTemplate[name] = templates
	}

	for i, tq := range c.Tide.Queries {
		if err := tq.Validate(); err != nil {
			return fmt.Errorf("tide query (index %d) is invalid: %w", i, err)
		}
	}

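	// Default the ProwJob and pod namespaces.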
	if c.ProwJobNamespace == "" {
		c.ProwJobNamespace = "default"
	}
	if c.PodNamespace == "" {
		c.PodNamespace = "default"
	}

	if c.Plank.JobURLPrefixConfig == nil {
		c.Plank.JobURLPrefixConfig = map[string]string{}
	}

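	// Default and parse the GitHub link URL.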
	if c.GitHubOptions.LinkURLFromConfig == "" {
		c.GitHubOptions.LinkURLFromConfig = "https://github.com"
	}
	linkURL, err := url.Parse(c.GitHubOptions.LinkURLFromConfig)
	if err != nil {
		return fmt.Errorf("unable to parse github.link_url, might not be a valid url: %w", err)
	}
	c.GitHubOptions.LinkURL = linkURL

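	// Default the status error link to the test-infra issue tracker.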
	if c.StatusErrorLink == "" {
		c.StatusErrorLink = "https://github.com/kubernetes/test-infra/issues"
	}

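	// Parse and apply the configured log level.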
	if c.LogLevel == "" {
		c.LogLevel = "info"
	}
	lvl, err := logrus.ParseLevel(c.LogLevel)
	if err != nil {
		return err
	}
	logrus.SetLevel(lvl)

	// Avoid using a job timeout of infinity by setting the default value to 24 hours
	if c.DefaultJobTimeout == nil {
		c.DefaultJobTimeout = &metav1.Duration{Duration: DefaultJobTimeout}
	}

	// Ensure Policy.Include and Policy.Exclude are mutually exclusive
	if len(c.BranchProtection.Include) > 0 && len(c.BranchProtection.Exclude) > 0 {
		return fmt.Errorf("Forbidden to set both Policy.Include and Policy.Exclude, Please use either Include or Exclude!")
	}

	return nil
}
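
The Spyglass handling above migrates the deprecated `viewers` map into `lenses`, sorts the migrated entries by name for a stable order, and pre-compiles every referenced file regex into `RegexCache`. Below is a minimal, self-contained sketch of that compile-once-and-cache pattern; the `lens` type and `buildRegexCache` helper are illustrative stand-ins, not part of the Prow config package.

package main

import (
	"fmt"
	"regexp"
	"sort"
)

// lens is a stand-in for Prow's LensFileConfig, reduced to the fields the
// sketch needs.
type lens struct {
	Name          string
	RequiredFiles []string
	OptionalFiles []string
}

// buildRegexCache compiles every file pattern referenced by the lenses exactly
// once, mirroring how parseProwConfig fills Deck.Spyglass.RegexCache.
func buildRegexCache(lenses []lens) (map[string]*regexp.Regexp, error) {
	cache := map[string]*regexp.Regexp{}
	for _, l := range lenses {
		for _, pattern := range append(l.OptionalFiles, l.RequiredFiles...) {
			if _, ok := cache[pattern]; ok {
				continue // already compiled
			}
			r, err := regexp.Compile(pattern)
			if err != nil {
				return nil, fmt.Errorf("cannot compile regexp %q: %w", pattern, err)
			}
			cache[pattern] = r
		}
	}
	return cache, nil
}

func main() {
	lenses := []lens{
		{Name: "junit", RequiredFiles: []string{`artifacts/junit.*\.xml`}},
		{Name: "buildlog", RequiredFiles: []string{`build-log\.txt`}},
	}
	// Keep the ordering stable by name, as parseProwConfig does for migrated viewers.
	sort.Slice(lenses, func(i, j int) bool { return lenses[i].Name < lenses[j].Name })

	cache, err := buildRegexCache(lenses)
	if err != nil {
		panic(err)
	}
	fmt.Println(cache[`build-log\.txt`].MatchString("build-log.txt")) // true
}

Because the cache is keyed by the raw pattern string, lenses that reference the same pattern share a single compiled regexp, just as in the original function.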