async function appendPackage()

in src/backend/catalog-builder/catalog-builder.lambda.ts [209:282]


async function appendPackage(packages: any, pkgKey: string, bucketName: string, denyList: DenyListClient) {
  console.log(`Processing key: ${pkgKey}`);
  const [, packageName, versionStr] = constants.STORAGE_KEY_FORMAT_REGEX.exec(pkgKey)!;
  const version = new SemVer(versionStr);
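  // `packages` maps package name -> major version line -> the PackageInfo currently held for that line.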
  const found = packages.get(packageName)?.get(version.major);
  // If the version equals the current latest, we replace it (so re-generated metadata is taken into
  // account); only strictly older versions are skipped.
  if (found != null && version.compare(found.version) < 0) {
    console.log(`Skipping ${packageName}@${version} because it is older than the existing ${found.version}`);
    return;
  }

  console.log(`Checking if ${packageName}@${version.version} matches a deny list rule`);
  const blocked = denyList.lookup(packageName, version.version);
  if (blocked) {
    console.log(`Skipping ${packageName}@${version.version} because it is blocked by the deny list rule: ${JSON.stringify(blocked)}`);
    return;
  }

  console.log(`Registering ${packageName}@${version}`);

  // Download the tarball to inspect the `package.json` data therein.
  const pkg = await aws.s3().getObject({ Bucket: bucketName, Key: pkgKey }).promise();
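  // The npm metadata object lives next to the tarball, under the same key with a different suffix.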
  const metadataKey = pkgKey.replace(constants.PACKAGE_KEY_SUFFIX, constants.METADATA_KEY_SUFFIX);
  const metadataResponse = await aws.s3().getObject({ Bucket: bucketName, Key: metadataKey }).promise();
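  // Gunzip the tarball, then scan its entries to find and buffer package/package.json.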
  const manifest = await new Promise<Buffer>((ok, ko) => {
    gunzip(Buffer.from(pkg.Body! as any), (err, tar) => {
      if (err) {
        return ko(err);
      }
      extract()
        .on('entry', (header, stream, next) => {
          if (header.name !== 'package/package.json') {
            // Not the file we are looking for, skip ahead (next run-loop tick).
            return setImmediate(next);
          }
          const chunks = new Array<Buffer>();
          return stream
            .on('data', (chunk) => chunks.push(Buffer.from(chunk)))
            .once('end', () => {
              ok(Buffer.concat(chunks));
              next();
            })
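            // Resume the stream so data flows and the 'end' event above can fire.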
            .resume();
        })
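        // 'finish' fires once all entries were processed. If package/package.json was found, the
        // promise is already resolved and this rejection is a no-op; otherwise, fail.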
        .once('finish', () => {
          ko(new Error('Could not find package/package.json in tarball!'));
        })
        .write(tar, (writeErr) => {
          if (writeErr) {
            ko(writeErr);
          }
        });
    });
  });
  // Add the PackageInfo into the working set
  const pkgMetadata = JSON.parse(manifest.toString('utf-8'));
  const npmMetadata = JSON.parse(metadataResponse?.Body?.toString('utf-8') ?? '{}');
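  // Index the entry by package name, then by major version line.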
  const major = new SemVer(pkgMetadata.version).major;
  if (!packages.has(pkgMetadata.name)) {
    packages.set(pkgMetadata.name, new Map());
  }
  packages.get(pkgMetadata.name)!.set(major, {
    author: pkgMetadata.author,
    description: pkgMetadata.description,
    keywords: pkgMetadata.keywords,
    languages: pkgMetadata.jsii.targets,
    license: pkgMetadata.license,
    major,
    metadata: npmMetadata,
    name: pkgMetadata.name,
    version: pkgMetadata.version,
  });

}