async build()

in parsers/LU/JS/packages/lu/src/parser/qnabuild/builder.ts [142:290]


  /**
   * Builds QnA Maker knowledgebases from parsed qna contents and produces the
   * corresponding declarative dialog assets.
   *
   * For each content (throttled to the QnA Maker TPS limit) this will:
   * match or create a knowledgebase, publish it when its content changed,
   * replace word alterations when present, and update the per-culture
   * recognizer, multi-language recognizer, cross-trained recognizer and
   * settings assets in place.
   *
   * @param qnaContents parsed qna file contents; not mutated by this method.
   * @param recognizers map of dialog file name to its Recognizer asset; KB ids are set on matches.
   * @param subscriptionkey QnA Maker subscription key used for authoring calls.
   * @param endpoint QnA Maker authoring endpoint.
   * @param botName bot name used when composing the KB name.
   * @param suffix environment suffix used when composing the KB name.
   * @param fallbackLocale locale whose recognizer also becomes the '' (default) entry.
   * @param multiRecognizers optional multi-language recognizer assets to update.
   * @param settings optional settings asset; receives KB ids and the QnA hostname.
   * @param crosstrainedRecognizers optional cross-trained recognizer assets to update.
   * @param dialogType when CROSSTRAINED, cross-trained recognizer assets are emitted.
   * @returns the generated declarative asset contents.
   */
  async build(
    qnaContents: any[],
    recognizers: Map<string, Recognizer>,
    subscriptionkey: string,
    endpoint: string,
    botName: string,
    suffix: string,
    fallbackLocale: string,
    multiRecognizers?: Map<string, MultiLanguageRecognizer>,
    settings?: Settings,
    crosstrainedRecognizers?: Map<string, CrossTrainedRecognizer>,
    dialogType?: string) {
    // qna api TPS which means concurrent transactions to qna maker api in 1 second
    const qnaApiTps = 3

    // set qna maker call delay duration to 1100 millisecond because 1000 can hit corner case of rate limit
    const delayDuration = 1100

    // default returned recognizer values
    let recognizerValues: Recognizer[] = []

    let multiRecognizerValues: MultiLanguageRecognizer[] = []

    let settingsValue: any

    let crosstrainedRecognizerValues: CrossTrainedRecognizer[] = []

    // skip all remote QnA Maker work if every qna content is empty
    const isAllQnAEmpty = fileHelper.isAllFilesSectionEmpty(qnaContents)

    if (!isAllQnAEmpty) {
      const qnaBuildCore = new QnaBuildCore(subscriptionkey, endpoint)
      const kbs = (await qnaBuildCore.getKBList()).knowledgebases

      // splice from a local copy so the caller's qnaContents array is not
      // emptied as a side effect of this method
      const pendingContents = [...qnaContents]

      // here we do a while loop to make full use of qna tps capacity
      while (pendingContents.length > 0) {
        // get a number(set by qnaApiTps) of contents for each loop
        const subQnaContents = pendingContents.splice(0, qnaApiTps)

        // concurrently handle applications
        await Promise.all(subQnaContents.map(async content => {
          // init current kb object from qna content
          const qnaObj = await this.initQnaFromContent(content, botName, suffix)
          const currentKB = qnaObj.kb
          const currentAlt = qnaObj.alterations
          const culture = content.language as string

          let hostName = ''

          // collect the recognizers whose dialog file name targets this
          // content's culture (file names look like <dialog>.<culture>.qna.dialog)
          const recognizersOfContentCulture: Recognizer[] = []
          for (const [dialogFileName, recognizer] of recognizers) {
            const fileNameSplit = dialogFileName.split('.')
            if (fileNameSplit[fileNameSplit.length - 2] === culture) {
              // find if there is a matched name with current kb under current authoring key
              if (!recognizer.getKBId()) {
                for (const kb of kbs) {
                  if (kb.name === currentKB.name) {
                    recognizer.setKBId(kb.id)
                    hostName = kb.hostName
                    break
                  }
                }
              }

              recognizersOfContentCulture.push(recognizer)
            }
          }

          let needPublish = false

          // compare models to update the model if a match found
          // otherwise create a new kb
          const recognizerWithKBId = recognizersOfContentCulture.find((r: Recognizer) => r.getKBId() !== '')
          if (recognizerWithKBId !== undefined) {
            // To see if need update the model
            needPublish = await this.updateKB(currentKB, qnaBuildCore, recognizerWithKBId, delayDuration)
          } else {
            // create a new kb
            needPublish = await this.createKB(currentKB, qnaBuildCore, recognizersOfContentCulture, delayDuration)
          }

          const publishRecognizer = recognizerWithKBId || recognizersOfContentCulture[0]

          if (needPublish) {
            // train and publish kb
            await this.publishKB(qnaBuildCore, publishRecognizer, currentKB.name, delayDuration)
          }

          // the name-match loop above fills hostName only when it assigned a
          // KB id; otherwise fetch the host name from the service
          if (hostName === '') hostName = (await qnaBuildCore.getKB(publishRecognizer.getKBId())).hostName

          hostName += '/qnamaker'

          // update alterations if there are
          if (currentAlt.wordAlterations && currentAlt.wordAlterations.length > 0) {
            this.handler('Replacing alterations...\n')
            await qnaBuildCore.replaceAlt(currentAlt)
          }

          for (const recognizer of recognizersOfContentCulture) {
            // update multiLanguageRecognizer asset
            const dialogName = path.basename(recognizer.getDialogPath(), `.${culture}.qna.dialog`)
            const dialogFileName = path.basename(recognizer.getDialogPath(), '.dialog')
            if (multiRecognizers && multiRecognizers.has(dialogName)) {
              const multiRecognizer = multiRecognizers.get(dialogName) as MultiLanguageRecognizer
              multiRecognizer.recognizers[culture] = dialogFileName
              // fallback locale doubles as the default ('') recognizer
              if (culture.toLowerCase() === fallbackLocale.toLowerCase()) {
                multiRecognizer.recognizers[''] = dialogFileName
              }
            }

            if (crosstrainedRecognizers && crosstrainedRecognizers.has(dialogName)) {
              const crosstrainedRecognizer = crosstrainedRecognizers.get(dialogName) as CrossTrainedRecognizer
              if (!crosstrainedRecognizer.recognizers.includes(dialogName + '.qna')) {
                crosstrainedRecognizer.recognizers.push(dialogName + '.qna')
              }
            }

            // update settings asset; setting keys are the dialog file name
            // with '.' and '-' normalized to '_'
            if (settings) {
              settings.qna[dialogFileName.split('.').join('_').replace(/-/g, '_')] = recognizer.getKBId()
              settings.qna.hostname = hostName
            }
          }
        }))
      }

      // write dialog assets
      if (recognizers) {
        recognizerValues = Array.from(recognizers.values())
      }

      if (multiRecognizers) {
        multiRecognizerValues = Array.from(multiRecognizers.values())
      }

      if (settings) {
        settingsValue = settings as Settings
      }
    }

    if (dialogType === recognizerType.CROSSTRAINED && crosstrainedRecognizers) {
      crosstrainedRecognizerValues = Array.from(crosstrainedRecognizers.values())
    }

    const dialogContents = this.generateDeclarativeAssets(recognizerValues, multiRecognizerValues, settingsValue, crosstrainedRecognizerValues)

    return dialogContents
  }