async function build()

in src/common/tools/gen_cache.ts [193:300]


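/**
 * Depending on the module-level `mode` ('emit' | 'list' | 'validate'), builds,
 * lists, or validates the cache entries produced by the cacheable files found
 * under `suiteDir`.
 */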
async function build(suiteDir: string) {
  if (!fs.existsSync(suiteDir)) {
    console.error(`Could not find ${suiteDir}`);
    process.exit(1);
  }

  // Load hashes.json, which maps each cache entry path to the hash of the source file that generates it.
  const fileHashJsonPath = `${outDir}/hashes.json`;
  let fileHashes: Record<string, string> = {};
  if (fs.existsSync(fileHashJsonPath)) {
    const json = fs.readFileSync(fileHashJsonPath, { encoding: 'utf8' });
    fileHashes = JSON.parse(json);
  }

  // Crawl the cacheable source files; paths are POSIX-style and prefixed with suiteDir.
  const filesToEnumerate = glob(suiteDir, cacheFileSuffix)
    .map(p => `${suiteDir}/${p}`)
    .sort();

  const fileHasher = new SourceHasher();
  const cacheablePathToTS = new Map<string, string>();
  const errors: Array<string> = [];

  for (const file of filesToEnumerate) {
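    // Strip the '.ts' extension and import the transpiled '.js' module.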
    const pathWithoutExtension = file.substring(0, file.length - 3);
    const mod = await import(`../../../${pathWithoutExtension}.js`);
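    // Only modules that export a cacheable data object 'd' produce a cache entry.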
    if (mod.d?.serialize !== undefined) {
      const cacheable = mod.d as Cacheable<unknown>;

      {
        // Check for collisions
        const existing = cacheablePathToTS.get(cacheable.path);
        if (existing !== undefined) {
          errors.push(
            `'${cacheable.path}' is emitted by both:
    '${existing}'
and
    '${file}'`
          );
        }
        cacheablePathToTS.set(cacheable.path, file);
      }

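      // Hash the source file to decide whether its cache entry is out of date.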
      const outPath = `${outDir}/${cacheable.path}`;
      const fileHash = fileHasher.hashOf(file);

      switch (mode) {
        case 'emit': {
          if (!forceRebuild && fileHashes[cacheable.path] === fileHash) {
            if (verbose) {
              console.log(`'${outPath}' is up to date`);
            }
            continue;
          }
          console.log(`building '${outPath}'`);
          const data = await cacheable.build();
          const serialized = cacheable.serialize(data);
          fs.mkdirSync(path.dirname(outPath), { recursive: true });
          fs.writeFileSync(outPath, serialized, 'binary');
          fileHashes[cacheable.path] = fileHash;
          break;
        }
        case 'list': {
          console.log(outPath);
          break;
        }
        case 'validate': {
          if (fileHashes[cacheable.path] !== fileHash) {
            errors.push(
              `'${outPath}' needs rebuilding. Generate with 'npx grunt run:generate-cache'`
            );
          } else if (verbose) {
            console.log(`'${outPath}' is up to date`);
          }
          break;
        }
      }
    }
  }

  // Check that there aren't stale files in the cache directory
  for (const file of glob(outDir, '.bin')) {
    if (cacheablePathToTS.get(file) === undefined) {
      switch (mode) {
        case 'emit':
          fs.rmSync(`${outDir}/${file}`);
          break;
        case 'validate':
          errors.push(
            `cache file '${outDir}/${file}' is no longer generated. Remove with 'npx grunt run:generate-cache'`
          );
          break;
      }
    }
  }

  // Update hashes.json
  if (mode === 'emit') {
    const json = JSON.stringify(fileHashes, undefined, '  ');
    fs.writeFileSync(fileHashJsonPath, json, { encoding: 'utf8' });
  }

  if (errors.length > 0) {
    for (const error of errors) {
      console.error(error);
    }
    process.exit(1);
  }
}
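
For context, the loop above only processes modules that export a `d` object providing `path`, `build()` and `serialize()`. The sketch below shows a hypothetical cacheable module of that shape; the names and data are illustrative only, since the actual `Cacheable` implementations live in the suites and are not part of this excerpt.

// Hypothetical cacheable module, inferred from the calls build() makes above.
// Everything here (the path, the data, the plain object literal) is
// illustrative; real suites construct their Cacheable instances differently.
export const d = {
  // Cache entry path, relative to the cache output directory (outDir).
  path: 'example/lookup_table.bin',
  // Builds the data to cache; only invoked when the source hash has changed.
  build: async (): Promise<Uint8Array> => new Uint8Array([1, 2, 3, 4]),
  // Serializes the built data into the bytes written to `${outDir}/${path}`.
  serialize: (data: Uint8Array): Uint8Array => data,
};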